Merge branch 'develop'
diff --git a/Cargo.toml b/Cargo.toml
index ced41a9..fc2e715 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tuf"
-version = "0.1.2"
+version = "0.1.3"
authors = [ "heartsucker <heartsucker@autistici.org>" ]
description = "Library for The Update Framework (TUF)"
homepage = "https://github.com/heartsucker/rust-tuf"
diff --git a/README.md b/README.md
index b87f132..fb8f0da 100644
--- a/README.md
+++ b/README.md
@@ -15,6 +15,13 @@
Please make all pull requests to the `develop` branch.
+### Testing
+
+`rust-tuf` uses [`tuf-test-vectors`](https://github.com/heartsucker/tuf-test-vectors)
+to generate integration tests. When adding a complicated feature, you may need
+to open a separate pull request against that repository to ensure the required
+behaviors are sufficiently tested.
+
### Bugs
This project has a **full disclosure** policy on security related errors. Please
diff --git a/src/cjson.rs b/src/cjson.rs
index e71dd77..470ed70 100644
--- a/src/cjson.rs
+++ b/src/cjson.rs
@@ -1,17 +1,8 @@
-//! Hack-y crate used for development until canonical_json supports serde 0.9
-// TODO remove me
-
use itoa;
use json;
use std::collections::BTreeMap;
use std::io;
-
-// TODO this is actually borked. Beacuse we pass in JSON that has already been parsed
-// something like the sequence"\n" has been converted into an actual new line.
-// Either unescape everthing in the convert function or (ideally) to patch the
-// canonical_json lib. However, this works for testing which is good enough for
-// development for now. (sorry future self)
pub fn canonicalize(jsn: json::Value) -> Result<Vec<u8>, String> {
let converted = convert(jsn)?;
let mut buf = Vec::new();
@@ -24,6 +15,7 @@
Bool(bool),
Null,
Number(Number),
+ // TODO this needs to be &[u8] and not String
Object(BTreeMap<String, Value>),
String(String),
}
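
Aside: the point of `canonicalize` above is that signatures are computed over a deterministic byte encoding — object keys sorted, no insignificant whitespace. A rough standalone illustration of that property (the `encode` helper is hypothetical, not this module's API; real canonical JSON also handles nesting, numbers, and string escaping):

use std::collections::BTreeMap;

// Deterministic encoding of a flat string->string map: keys come out sorted
// because BTreeMap iterates in key order, and no extra whitespace is emitted.
fn encode(map: &BTreeMap<String, String>) -> String {
    let body = map.iter()
        .map(|(k, v)| format!("\"{}\":\"{}\"", k, v))
        .collect::<Vec<_>>()
        .join(",");
    format!("{{{}}}", body)
}

fn main() {
    let mut m = BTreeMap::new();
    m.insert("b".to_string(), "2".to_string());
    m.insert("a".to_string(), "1".to_string());
    // Insertion order does not matter; the output bytes are always the same.
    assert_eq!(encode(&m), r#"{"a":"1","b":"2"}"#);
}
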
diff --git a/src/error.rs b/src/error.rs
index 619152c..f21873a 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -27,19 +27,15 @@
/// A necessary piece of metadata was missing.
MissingMetadata(Role),
/// The signed metadata had duplicate signatures from a particular key.
- NonUniqueSignatures,
+ NonUniqueSignatures(Role),
/// The metadata did not provide any hash algorithms that this library can calculate.
NoSupportedHashAlgorithms,
/// A piece of metadata exceeded the provided or maximum allowed size.
OversizedMetadata(Role),
- /// The targets exceeded the provided size.
- OversizedTarget,
-    /// The calculated and provided hashes for the target did not match.
-    TargetHashMismatch,
    /// An unknown role type was parsed and rejected.
UnknownRole(String),
- /// The target does not exist in valid metadata.
- UnknownTarget,
+ /// The target does not exist in valid metadata or could not be verified.
+ UnavailableTarget,
/// The role did not have enough signatures to meet the required threshold.
UnmetThreshold(Role),
/// The key type was not supported by this library.
@@ -53,6 +49,7 @@
}
impl Error {
+    /// Helper to include the path that caused the error for filesystem I/O errors.
pub fn from_io(err: io::Error, path: &Path) -> Error {
Error::Io(format!("Path {:?} : {:?}", path, err))
}
diff --git a/src/lib.rs b/src/lib.rs
index 5e1d3d7..941b574 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -134,6 +134,8 @@
//!
//! ```
+#![deny(missing_docs)]
+
extern crate chrono;
extern crate data_encoding;
extern crate env_logger;
diff --git a/src/main.rs b/src/main.rs
index af4ecc6..13b3edc 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -41,9 +41,6 @@
} else if let Some(_) = matches.subcommand_matches("init") {
let path = PathBuf::from(matches.value_of("path").unwrap());
cmd_init(&path)
- } else if let Some(_) = matches.subcommand_matches("list") {
- let mut tuf = Tuf::new(config)?;
- cmd_list(&mut tuf)
} else if let Some(_) = matches.subcommand_matches("update") {
let mut tuf = Tuf::new(config)?;
cmd_update(&mut tuf)
@@ -97,7 +94,6 @@
.required(true)
.help("The full (non-local) path of the target to verify")))
.subcommand(SubCommand::with_name("init").about("Initializes a new TUF repo"))
- .subcommand(SubCommand::with_name("list").about("Lists available targets"))
.subcommand(SubCommand::with_name("update").about("Updates metadata from remotes"))
.subcommand(SubCommand::with_name("verify")
.about("Verifies a target")
@@ -116,17 +112,6 @@
Tuf::initialize(local_path)
}
-fn cmd_list(tuf: &mut Tuf) -> Result<(), Error> {
- let mut targets = tuf.list_targets();
- targets.sort();
-
- for target in targets.iter() {
- println!("{}", target);
- }
-
- Ok(())
-}
-
fn cmd_update(tuf: &mut Tuf) -> Result<(), Error> {
tuf.update()
}
diff --git a/src/metadata.rs b/src/metadata.rs
index 6b3e2dc..e90a983 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -16,12 +16,13 @@
static HASH_PREFERENCES: &'static [HashType] = &[HashType::Sha512, HashType::Sha256];
-#[derive(Eq, PartialEq, Deserialize, Debug)]
+#[derive(Eq, PartialEq, Deserialize, Debug, Clone)]
pub enum Role {
Root,
Targets,
Timestamp,
Snapshot,
+ TargetsDelegation(String),
}
impl FromStr for Role {
@@ -45,43 +46,56 @@
Role::Targets => write!(f, "{}", "targets"),
Role::Snapshot => write!(f, "{}", "snapshot"),
Role::Timestamp => write!(f, "{}", "timestamp"),
+ Role::TargetsDelegation(ref s) => write!(f, "{}", s),
}
}
}
pub trait RoleType: Debug {
- fn role() -> Role;
+ fn matches(role: &Role) -> bool;
}
#[derive(Debug)]
pub struct Root {}
impl RoleType for Root {
- fn role() -> Role {
- Role::Root
+ fn matches(role: &Role) -> bool {
+ match role {
+ &Role::Root => true,
+ _ => false,
+ }
}
}
#[derive(Debug)]
pub struct Targets {}
impl RoleType for Targets {
- fn role() -> Role {
- Role::Targets
+ fn matches(role: &Role) -> bool {
+ match role {
+ &Role::Targets => true,
+ _ => false,
+ }
}
}
#[derive(Debug)]
pub struct Timestamp {}
impl RoleType for Timestamp {
- fn role() -> Role {
- Role::Timestamp
+ fn matches(role: &Role) -> bool {
+ match role {
+ &Role::Timestamp => true,
+ _ => false,
+ }
}
}
#[derive(Debug)]
pub struct Snapshot {}
impl RoleType for Snapshot {
- fn role() -> Role {
- Role::Snapshot
+ fn matches(role: &Role) -> bool {
+ match role {
+ &Role::Snapshot => true,
+ _ => false,
+ }
}
}
@@ -123,25 +137,14 @@
#[derive(Debug, PartialEq)]
pub struct RootMetadata {
- // TODO consistent_snapshot: bool,
+ consistent_snapshot: bool,
expires: DateTime<UTC>,
pub version: i32,
pub keys: HashMap<KeyId, Key>,
- root: RoleDefinition,
- targets: RoleDefinition,
- timestamp: RoleDefinition,
- snapshot: RoleDefinition,
-}
-
-impl RootMetadata {
- pub fn role_definition<R: RoleType>(&self) -> &RoleDefinition {
- match R::role() {
- Role::Root => &self.root,
- Role::Targets => &self.targets,
- Role::Timestamp => &self.timestamp,
- Role::Snapshot => &self.snapshot,
- }
- }
+ pub root: RoleDefinition,
+ pub targets: RoleDefinition,
+ pub timestamp: RoleDefinition,
+ pub snapshot: RoleDefinition,
}
impl Metadata<Root> for RootMetadata {
@@ -178,6 +181,11 @@
DeserializeError::custom(format!("Field 'version' did not have a valid format: {}", e))
})?;
+ let consistent_snapshot = json::from_value(object.remove("consistent_snapshot")
+ .ok_or_else(|| DeserializeError::custom("Field 'consistent_snapshot' missing"))?).map_err(|e| {
+ DeserializeError::custom(format!("Field 'consistent_snapshot' did not have a valid format: {}", e))
+ })?;
+
let mut roles = object.remove("roles")
.and_then(|v| match v {
json::Value::Object(o) => Some(o),
@@ -210,6 +218,7 @@
})?;
Ok(RootMetadata {
+ consistent_snapshot,
expires: expires,
version: version,
keys: keys,
@@ -258,7 +267,7 @@
}
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct TargetsMetadata {
expires: DateTime<UTC>,
pub version: i32,
@@ -456,13 +465,16 @@
/// A public key
#[derive(Clone, PartialEq, Debug, Deserialize)]
pub struct Key {
+    /// The key's type.
#[serde(rename = "keytype")]
pub typ: KeyType,
+ /// The key's value.
#[serde(rename = "keyval")]
pub value: KeyValue,
}
impl Key {
+ /// Use the given key to verify a signature over a byte array.
pub fn verify(&self,
scheme: &SignatureScheme,
msg: &[u8],
@@ -484,7 +496,9 @@
/// Types of public keys.
#[derive(Clone, PartialEq, Debug)]
pub enum KeyType {
+ /// [Ed25519](https://en.wikipedia.org/wiki/EdDSA#Ed25519) signature scheme.
Ed25519,
+ /// Internal representation of an unsupported key type.
Unsupported(String),
}
@@ -691,7 +705,6 @@
#[derive(Clone, PartialEq, Debug)]
pub struct HashValue(pub Vec<u8>);
-
impl<'de> Deserialize<'de> for HashValue {
fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
match Deserialize::deserialize(de)? {
@@ -706,26 +719,131 @@
}
#[derive(Clone, Debug, Deserialize)]
-// TODO this is a dumb name
pub struct TargetInfo {
pub length: i64,
pub hashes: HashMap<HashType, HashValue>,
- pub custom: Option<HashMap<String, String>>, // TODO json value
+ pub custom: Option<HashMap<String, json::Value>>,
}
#[derive(Clone, PartialEq, Debug, Deserialize)]
pub struct Delegations {
- keys: Vec<KeyId>,
- roles: Vec<DelegatedRole>,
+ pub keys: HashMap<KeyId, Key>,
+ pub roles: Vec<DelegatedRole>,
}
-#[derive(Clone, PartialEq, Debug, Deserialize)]
+#[derive(Clone, PartialEq, Debug)]
pub struct DelegatedRole {
- name: String,
- key_ids: Vec<KeyId>,
- threshold: i32,
- // TODO path_hash_prefixes
- paths: Vec<String>,
+ pub name: String,
+ pub key_ids: Vec<KeyId>,
+ pub threshold: i32,
+ pub terminating: bool,
+ paths: TargetPaths,
+}
+
+impl DelegatedRole {
+ pub fn could_have_target(&self, target: &str) -> bool {
+ match self.paths {
+ TargetPaths::Patterns(ref patterns) => {
+ for path in patterns.iter() {
+ let path_str = path.as_str();
+ if path_str == target {
+ return true
+ } else if path_str.ends_with("/") && target.starts_with(path_str) {
+ return true
+ }
+ }
+ return false
+ }
+ }
+ }
+}
+
+impl<'de> Deserialize<'de> for DelegatedRole {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
+ if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
+ match (object.remove("name"), object.remove("keyids"),
+ object.remove("threshold"), object.remove("terminating"),
+ object.remove("paths"), object.remove("path_hash_prefixes")) {
+ (Some(n), Some(ks), Some(t), Some(term), Some(ps), None) => {
+ let name =
+ json::from_value(n).map_err(|e| {
+ DeserializeError::custom(format!("Failed at name: {}", e))
+ })?;
+
+ let key_ids =
+ json::from_value(ks).map_err(|e| {
+ DeserializeError::custom(format!("Failed at keyids: {}", e))
+ })?;
+
+ let threshold =
+ json::from_value(t).map_err(|e| {
+                        DeserializeError::custom(format!("Failed at threshold: {}", e))
+ })?;
+
+ let terminating =
+ json::from_value(term).map_err(|e| {
+                        DeserializeError::custom(format!("Failed at terminating: {}", e))
+ })?;
+
+ let paths: Vec<String> =
+ json::from_value(ps).map_err(|e| {
+                        DeserializeError::custom(format!("Failed at paths: {}", e))
+ })?;
+
+ Ok(DelegatedRole {
+ name: name,
+ key_ids: key_ids,
+ threshold: threshold,
+ terminating: terminating,
+ paths: TargetPaths::Patterns(paths),
+ })
+ }
+ (_, _, _, _, Some(_), Some(_)) =>
+                    Err(DeserializeError::custom("Fields 'paths' and 'path_hash_prefixes' are mutually exclusive".to_string())),
+ (_, _, _, _, _, Some(_)) =>
+                    Err(DeserializeError::custom("'path_hash_prefixes' is not yet supported".to_string())),
+                _ => Err(DeserializeError::custom("Delegated role missing fields".to_string())),
+ }
+ } else {
+ Err(DeserializeError::custom("Delegated role was not an object".to_string()))
+ }
+ }
+}
+
+
+#[derive(Clone, PartialEq, Debug)]
+pub enum TargetPaths {
+ Patterns(Vec<String>),
+ // TODO HashPrefixes(Vec<String>),
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn delegated_role_could_have_target() {
+ let vectors = vec![
+ ("foo", "foo", true),
+ ("foo/", "foo/bar", true),
+ ("foo", "foo/bar", false),
+ ("foo/bar", "foo/baz", false),
+ ("foo/bar/", "foo/bar/baz", true),
+ ];
+
+ for &(prefix, target, success) in vectors.iter() {
+ let delegation = DelegatedRole {
+ name: "".to_string(),
+ key_ids: Vec::new(),
+ threshold: 1,
+ terminating: false,
+ paths: TargetPaths::Patterns(vec![prefix.to_string()]),
+ };
+
+ assert!(!success ^ delegation.could_have_target(target),
+ format!("Prefix {} should have target {}: {}", prefix, target, success))
+ };
+ }
}
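
Aside: the rule exercised by the test vectors above — a delegated `paths` pattern matches a target when it is an exact match, or when it ends in `/` and is a prefix of the target — can be restated standalone as follows. Illustrative only; `pattern_matches` is a hypothetical name, not part of the crate:

// Standalone restatement of the matching rule in `could_have_target`.
fn pattern_matches(pattern: &str, target: &str) -> bool {
    // exact match, or a directory-style prefix ending in '/'
    pattern == target || (pattern.ends_with('/') && target.starts_with(pattern))
}

fn main() {
    assert!(pattern_matches("foo", "foo"));
    assert!(pattern_matches("foo/", "foo/bar"));
    assert!(!pattern_matches("foo", "foo/bar"));
    assert!(!pattern_matches("foo/bar", "foo/baz"));
    assert!(pattern_matches("foo/bar/", "foo/bar/baz"));
}
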
diff --git a/src/tuf.rs b/src/tuf.rs
index 336688e..fcc7fa6 100644
--- a/src/tuf.rs
+++ b/src/tuf.rs
@@ -18,10 +18,12 @@
HashValue, KeyId, Key};
use util;
-
+/// A remote TUF repository.
#[derive(Debug)]
pub enum RemoteRepo {
+    /// An untrusted repository on the same file system. Primarily used for testing.
File(PathBuf),
+ /// A repository reachable via HTTP/S.
Http(Url),
}
@@ -48,20 +50,26 @@
}
impl Tuf {
- /// Create a `Tuf` struct from an existing repo with the initial root keys pinned. This also
- /// calls `initialize` to ensure the needed paths exist.
+ /// Create a `Tuf` struct from an existing repo with the initial root keys pinned.
pub fn from_root_keys(root_keys: Vec<Key>, config: Config) -> Result<Self, Error> {
- Self::initialize(&config.local_path)?;
+ if config.init {
+ Self::initialize(&config.local_path)?;
+ }
let root = {
let fetch_type = &FetchType::Cache(config.local_path.clone());
match Self::read_root_with_keys(fetch_type, &config.http_client, &root_keys) {
Ok(modified_root) => {
- Self::get_meta_num::<Root, RootMetadata, File>(fetch_type,
+ Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&config.http_client,
- 1,
- &modified_root,
+ &Role::Root,
+ Some(1),
+ modified_root.root.threshold,
+ &modified_root.root.key_ids,
+ &modified_root.keys,
+ None,
+ None,
&mut None)?
}
Err(e) => {
@@ -69,10 +77,15 @@
let fetch_type = &config.remote.as_fetch();
let modified_root =
Self::read_root_with_keys(fetch_type, &config.http_client, &root_keys)?;
- Self::get_meta_num::<Root, RootMetadata, File>(fetch_type,
+ Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&config.http_client,
- 1,
- &modified_root,
+ &Role::Root,
+ Some(1),
+ modified_root.root.threshold,
+ &modified_root.root.key_ids,
+ &modified_root.keys,
+ None,
+ None,
&mut None)?
}
}
@@ -93,17 +106,24 @@
}
/// Create a `Tuf` struct from a new repo. Must contain the `root.json`. The root is trusted
- /// with only verification on consistency, not authenticity. This call also calls `initialize`
- /// to ensure the needed paths exist.
+    /// with verification only of its consistency, not its authenticity.
pub fn new(config: Config) -> Result<Self, Error> {
- Self::initialize(&config.local_path)?;
+ if config.init {
+ Self::initialize(&config.local_path)?;
+ }
let root = {
let fetch_type = &FetchType::Cache(config.local_path.clone());
let root = Self::unverified_read_root(fetch_type, &config.http_client)?;
Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&config.http_client,
- &root,
+ &Role::Root,
+ None,
+ root.root.threshold,
+ &root.root.key_ids,
+ &root.keys,
+ None,
+ None,
&mut None)?
};
@@ -201,10 +221,15 @@
(None, None)
};
- let root = match Self::get_meta_num::<Root, RootMetadata, File>(fetch_type,
+ let root = match Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&self.http_client,
- i,
- &self.root,
+ &Role::Root,
+ Some(i),
+ self.root.root.threshold,
+ &self.root.root.key_ids,
+ &self.root.keys,
+ None,
+ None,
&mut out) {
Ok(root) => root,
Err(e) => {
@@ -223,10 +248,15 @@
// verify root again against itself (for cross signing)
// TODO this is not the most efficient way to do it, but it works
- match Self::get_meta_num::<Root, RootMetadata, File>(fetch_type,
+ match Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&self.http_client,
- i,
- &root,
+ &Role::Root,
+ Some(i),
+ root.root.threshold,
+ &root.root.key_ids,
+ &root.keys,
+ None,
+ None,
&mut None::<File>) {
Ok(root_again) => {
if root != root_again {
@@ -265,6 +295,7 @@
};
// set to None to untrust old metadata
+ // TODO delete old metadata
// TODO check that these resets are in line with the Mercury paper
self.targets = None;
self.timestamp = None;
@@ -287,7 +318,13 @@
let timestamp =
Self::get_metadata::<Timestamp, TimestampMetadata, File>(fetch_type,
&self.http_client,
- &self.root,
+ &Role::Timestamp,
+ None,
+ self.root.timestamp.threshold,
+ &self.root.timestamp.key_ids,
+ &self.root.keys,
+ None,
+ None,
&mut out)?;
match self.timestamp {
@@ -380,12 +417,15 @@
(None, None)
};
- let snapshot = Self::get_meta_prefix::<Snapshot,
+ let snapshot = Self::get_metadata::<Snapshot,
SnapshotMetadata,
File>(fetch_type,
&self.http_client,
- "",
- &self.root,
+ &Role::Snapshot,
+ None,
+ self.root.snapshot.threshold,
+ &self.root.snapshot.key_ids,
+ &self.root.keys,
Some(meta.length),
Some((&hash_alg, &expected_hash.0)),
&mut out)?;
@@ -485,13 +525,16 @@
(None, None)
};
- let targets = Self::get_meta_prefix::<Targets, TargetsMetadata, File>(fetch_type,
- &self.http_client,
- "",
- &self.root,
- meta.length,
- hash_data,
- &mut out)?;
+ let targets = Self::get_metadata::<Targets, TargetsMetadata, File>(fetch_type,
+ &self.http_client,
+ &Role::Targets,
+ None,
+ self.root.targets.threshold,
+ &self.root.targets.key_ids,
+ &self.root.keys,
+ meta.length,
+ hash_data,
+ &mut out)?;
// TODO ? check downloaded version matches what was in the snapshot.json
@@ -532,33 +575,12 @@
}
fn get_metadata<R: RoleType, M: Metadata<R>, W: Write>(fetch_type: &FetchType,
- http_client: &Client,
- root: &RootMetadata,
- mut out: &mut Option<W>)
- -> Result<M, Error> {
- Self::get_meta_prefix(fetch_type, http_client, "", root, None, None, &mut out)
- }
-
- fn get_meta_num<R: RoleType, M: Metadata<R>, W: Write>(fetch_type: &FetchType,
- http_client: &Client,
- num: i32,
- root: &RootMetadata,
- mut out: &mut Option<W>)
- -> Result<M, Error> {
- // TODO this should check that the metadata version == num
- Self::get_meta_prefix(fetch_type,
- http_client,
- &format!("{}.", num),
- root,
- None,
- None,
- &mut out)
- }
-
- fn get_meta_prefix<R: RoleType, M: Metadata<R>, W: Write>(fetch_type: &FetchType,
http_client: &Client,
- prefix: &str,
- root: &RootMetadata,
+ role: &Role,
+ metadata_version: Option<i32>,
+ threshold: i32,
+ trusted_ids: &[KeyId],
+ available_keys: &HashMap<KeyId, Key>,
size: Option<i64>,
hash_data: Option<(&HashType,
&[u8])>,
@@ -566,12 +588,14 @@
-> Result<M, Error> {
debug!("Loading metadata from {:?}", fetch_type);
+ let metadata_version_str = metadata_version.map(|x| format!("{}.", x))
+ .unwrap_or_else(|| "".to_string());
let buf: Vec<u8> = match fetch_type {
&FetchType::Cache(ref local_path) => {
let path = local_path.join("metadata")
.join("current")
- .join(format!("{}{}.json", prefix, R::role()));
+ .join(format!("{}{}.json", metadata_version_str, role));
info!("Reading metadata from local path: {:?}", path);
let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
@@ -585,7 +609,7 @@
buf
}
&FetchType::File(ref path) => {
- let path = path.join(format!("{}{}.json", prefix, R::role()));
+ let path = path.join(format!("{}{}.json", metadata_version_str, role));
info!("Reading metadata from path: {:?}", path);
let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
@@ -599,7 +623,7 @@
buf
}
&FetchType::Http(ref url) => {
- let url = url.join(&format!("{}{}.json", prefix, R::role()))?;
+ let url = url.join(&format!("{}{}.json", metadata_version_str, role))?;
let mut resp = http_client.get(url).send()?;
let mut buf = Vec::new();
@@ -613,13 +637,13 @@
};
let signed = json::from_slice(&buf)?;
- let safe_bytes = Self::verify_meta::<R>(signed, root)?;
+ let safe_bytes = Self::verify_meta::<R>(signed, role, threshold, trusted_ids, available_keys)?;
let meta: M = json::from_slice(&safe_bytes)?;
// TODO this will be a problem with updating root metadata and this function probably
// needs an arg like `allow_expired`.
if meta.expires() <= &UTC::now() {
- return Err(Error::ExpiredMetadata(R::role()));
+ return Err(Error::ExpiredMetadata(role.clone()));
}
match out {
@@ -717,13 +741,14 @@
}
fn verify_meta<R: RoleType>(signed: SignedMetadata<R>,
- root: &RootMetadata)
+ role: &Role,
+ threshold: i32,
+ trusted_ids: &[KeyId],
+ available_keys: &HashMap<KeyId, Key>)
-> Result<Vec<u8>, Error> {
let bytes =
cjson::canonicalize(signed.signed).map_err(|err| Error::CanonicalJsonError(err))?;
- let role = root.role_definition::<R>();
-
let unique_count = signed.signatures
.iter()
.map(|s| &s.key_id)
@@ -731,12 +756,11 @@
.len();
if signed.signatures.len() != unique_count {
- return Err(Error::NonUniqueSignatures);
+ return Err(Error::NonUniqueSignatures(role.clone()));
}
- let keys = role.key_ids
- .iter()
- .map(|id| (id, root.keys.get(id)))
+ let keys = trusted_ids.iter()
+ .map(|id| (id, available_keys.get(id)))
.fold(HashMap::new(), |mut m, (id, k)| {
if let Some(key) = k {
m.insert(id, key);
@@ -746,7 +770,7 @@
m
});
- if role.threshold <= 0 {
+ if threshold <= 0 {
return Err(Error::VerificationFailure("Threshold not >= 1".into()));
}
@@ -754,7 +778,7 @@
for sig in signed.signatures.iter() {
if let Some(key) = keys.get(&sig.key_id) {
debug!("Verifying role {:?} with key ID {:?}",
- R::role(),
+ role,
sig.key_id);
match key.verify(&sig.method, &bytes, &sig.sig) {
@@ -764,28 +788,14 @@
}
Err(e) => warn!("Failed to verify with key ID {:?}: {:?}", &sig.key_id, e),
}
- if valid_sigs == role.threshold {
+ if valid_sigs == threshold {
return Ok(bytes);
}
}
}
- info!("Threshold not met: {}/{}", valid_sigs, role.threshold);
- return Err(Error::UnmetThreshold(R::role()));
- }
-
- /// Lists all targets that are currently available. If a target is missing, it means the
- /// metadata chain that leads to it cannot be verified, and the target is therefore untrusted.
- // TODO stronger return type
- pub fn list_targets(&self) -> Vec<String> {
- match self.targets {
- Some(ref targets) => {
- let mut res = targets.targets.keys().cloned().collect::<Vec<String>>();
- res.sort();
- res
- }
- None => Vec::new(),
- }
+ info!("Threshold not met: {}/{}", valid_sigs, threshold);
+ return Err(Error::UnmetThreshold(role.clone()));
}
/// Reads a target from local storage or fetches it from a remote repository. Verifies the
@@ -793,124 +803,120 @@
/// be verified.
// TODO ? stronger input type
pub fn fetch_target(&self, target: &str) -> Result<PathBuf, Error> {
- let target_meta = match self.targets {
- Some(ref targets) => {
- targets.targets
- .get(target)
- .ok_or_else(|| Error::UnknownTarget)?
- }
+ let metadata_chain = match self.targets {
+ Some(ref targets) => TargetPathIterator::new(&self, targets.clone(), target),
None => return Err(Error::MissingMetadata(Role::Targets)),
};
+ for ref targets_meta in metadata_chain {
+ let target_meta = match targets_meta.targets.get(target) {
+ Some(meta) => meta,
+ None => continue,
+ };
- let (hash_alg, expected_hash): (&HashType, HashValue) = HashType::preferences().iter()
- .fold(None, |res, pref| {
- res.or_else(|| if let Some(hash) = target_meta.hashes.get(&pref) {
- Some((pref, hash.clone()))
- } else {
- None
+ let (hash_alg, expected_hash): (&HashType, HashValue) = HashType::preferences().iter()
+ .fold(None, |res, pref| {
+ res.or_else(|| if let Some(hash) = target_meta.hashes.get(&pref) {
+ Some((pref, hash.clone()))
+ } else {
+ None
+ })
})
- })
- .ok_or_else(|| Error::NoSupportedHashAlgorithms)?;
+ .ok_or_else(|| Error::NoSupportedHashAlgorithms)?;
- // TODO correctly split path
- let path = self.local_path.join("targets").join(util::url_path_to_os_path(target)?);
- info!("reading target from local path: {:?}", path);
+ // TODO correctly split path
+ let path = self.local_path.join("targets").join(util::url_path_to_os_path(target)?);
+ info!("reading target from local path: {:?}", path);
- if path.exists() {
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- Self::read_and_verify(&mut file,
- &mut None::<&mut File>,
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0)))?;
- let _ = file.seek(SeekFrom::Start(0))?;
- return Ok(path);
- } else {
- let (out, out_path) = self.temp_file()?;
+ if path.exists() {
+ let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
+ Self::read_and_verify(&mut file,
+ &mut None::<&mut File>,
+ Some(target_meta.length),
+ Some((&hash_alg, &expected_hash.0)))?;
+ let _ = file.seek(SeekFrom::Start(0))?;
+ return Ok(path);
+ } else {
+ let (out, out_path) = self.temp_file()?;
- match self.remote {
- RemoteRepo::File(ref path) => {
- let mut path = path.clone();
- path.extend(util::url_path_to_path_components(target)?);
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
+ match self.remote {
+ RemoteRepo::File(ref path) => {
+ let mut path = path.clone();
+ path.extend(util::url_path_to_path_components(target)?);
+ let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- match Self::read_and_verify(&mut file,
- &mut Some(out),
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0))) {
- Ok(()) => {
- // TODO ensure intermediate directories exist
- let mut storage_path = self.local_path.join("targets");
- storage_path.extend(util::url_path_to_path_components(target)?);
+ match Self::read_and_verify(&mut file,
+ &mut Some(out),
+ Some(target_meta.length),
+ Some((&hash_alg, &expected_hash.0))) {
+ Ok(()) => {
+ let mut storage_path = self.local_path.join("targets");
+ storage_path.extend(util::url_path_to_path_components(target)?);
- {
- let parent = storage_path.parent()
- .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
+ {
+ let parent = storage_path.parent()
+ .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
- DirBuilder::new()
- .recursive(true)
- .create(parent)?;
+ DirBuilder::new()
+ .recursive(true)
+ .create(parent)?;
+ }
+
+ fs::rename(out_path, storage_path.clone())?;
+ return Ok(storage_path)
}
-
- fs::rename(out_path, storage_path.clone())?;
- Ok(storage_path)
- }
- Err(e) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => Err(e),
- Err(e) => {
- warn!("Error removing temp file {:?}: {}", out_path, e);
- Err(Error::from(e))
+ Err(e) => {
+ match fs::remove_file(out_path.clone()) {
+ Ok(_) => warn!("Error verifying target: {:?}", e),
+ Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
}
}
}
}
- }
- RemoteRepo::Http(ref url) => {
- let mut url = url.clone();
- {
- url.path_segments_mut()
- .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
- .extend(util::url_path_to_path_components(&target)?);
- }
- let url = util::url_to_hyper_url(&url)?;
- let mut resp = self.http_client.get(url).send()?;
-
- match Self::read_and_verify(&mut resp,
- &mut Some(out),
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0))) {
- Ok(()) => {
- // TODO this isn't windows friendly
- // TODO ensure intermediate directories exist
- let mut storage_path = self.local_path.join("targets");
- storage_path.extend(util::url_path_to_path_components(target)?);
-
- {
- let parent = storage_path.parent()
- .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
-
- DirBuilder::new()
- .recursive(true)
- .create(parent)?;
- }
-
- fs::rename(out_path, storage_path.clone())?;
-
- Ok(storage_path)
+ RemoteRepo::Http(ref url) => {
+ let mut url = url.clone();
+ {
+ url.path_segments_mut()
+ .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
+ .extend(util::url_path_to_path_components(&target)?);
}
- Err(e) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => Err(e),
- Err(e) => {
- warn!("Error removing temp file {:?}: {}", out_path, e);
- Err(Error::from(e))
+ let url = util::url_to_hyper_url(&url)?;
+ let mut resp = self.http_client.get(url).send()?;
+
+ match Self::read_and_verify(&mut resp,
+ &mut Some(out),
+ Some(target_meta.length),
+ Some((&hash_alg, &expected_hash.0))) {
+ Ok(()) => {
+ // TODO this isn't windows friendly
+ let mut storage_path = self.local_path.join("targets");
+ storage_path.extend(util::url_path_to_path_components(target)?);
+
+ {
+ let parent = storage_path.parent()
+ .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
+
+ DirBuilder::new()
+ .recursive(true)
+ .create(parent)?;
+ }
+
+ fs::rename(out_path, storage_path.clone())?;
+
+ return Ok(storage_path)
+ }
+ Err(e) => {
+ match fs::remove_file(out_path.clone()) {
+ Ok(_) => warn!("Error verifying target: {:?}", e),
+ Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
}
}
}
}
}
}
- }
+ };
+
+ Err(Error::UnavailableTarget)
}
fn read_and_verify<R: Read, W: Write>(input: &mut R,
@@ -951,7 +957,7 @@
if *bytes_left == 0 {
break;
} else if *bytes_left < 0 {
- return Err(Error::OversizedTarget);
+ return Err(Error::UnavailableTarget);
}
}
None => (),
@@ -966,7 +972,7 @@
match (generated_hash, hash_data) {
(Some(generated_hash), Some((_, expected_hash))) if generated_hash.as_ref() !=
expected_hash => {
- Err(Error::TargetHashMismatch)
+ Err(Error::UnavailableTarget)
}
// this should never happen, so err if it does for safety
(Some(_), None) => {
@@ -991,8 +997,8 @@
pub struct Config {
remote: RemoteRepo,
local_path: PathBuf,
- http_client: Client,
- // TODO add `init: bool` to specify whether or not to create dir structure
+ http_client: Client,
+ init: bool,
}
impl Config {
@@ -1008,6 +1014,7 @@
remote: Option<RemoteRepo>,
local_path: Option<PathBuf>,
http_client: Option<Client>,
+ init: bool,
}
impl ConfigBuilder {
@@ -1017,6 +1024,7 @@
remote: None,
local_path: None,
http_client: None,
+ init: true,
}
}
@@ -1038,6 +1046,12 @@
self
}
+    /// Whether or not to initialize the local directory structure.
+ pub fn init(mut self, init: bool) -> Self {
+ self.init = init;
+ self
+ }
+
/// Verify the configuration.
pub fn finish(self) -> Result<Config, Error> {
let remote = self.remote
@@ -1050,6 +1064,7 @@
remote: remote,
local_path: local_path,
http_client: self.http_client.unwrap_or_else(|| Client::new()),
+ init: self.init,
})
}
}
@@ -1070,3 +1085,132 @@
}
}
}
+
+
+struct TargetPathIterator<'a> {
+ tuf: &'a Tuf,
+ targets: TargetsMetadata,
+ target: &'a str,
+ terminate: bool,
+ targets_checked: bool,
+ roles_index: usize,
+ sub_iter: Option<Box<TargetPathIterator<'a>>>,
+}
+
+impl<'a> TargetPathIterator<'a> {
+ fn new(tuf: &'a Tuf, targets: TargetsMetadata, target: &'a str) -> Self {
+ TargetPathIterator {
+ tuf: tuf,
+ targets: targets,
+ target: target,
+ terminate: false,
+ targets_checked: false,
+ roles_index: 0,
+ sub_iter: None,
+ }
+ }
+}
+
+impl<'a> Iterator for TargetPathIterator<'a> {
+ type Item = TargetsMetadata;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.terminate {
+ return None
+ }
+
+ match self.targets.targets.get(self.target) {
+ Some(_) if !self.targets_checked => {
+ self.targets_checked = true;
+ Some(self.targets.clone())
+ },
+ _ => {
+ match self.targets.delegations {
+ Some(ref delegations) => {
+ for delegation in delegations.roles.iter().skip(self.roles_index) {
+ if delegation.terminating {
+ self.terminate = true;
+ }
+
+ self.roles_index += 1;
+
+ let (version, length, hash_data) = match self.tuf.snapshot {
+ Some(ref snapshot) => {
+ match snapshot.meta.get(&format!("{}.json", delegation.name)) {
+ Some(meta) => {
+ let hash_data = match meta.hashes {
+ Some(ref hashes) => {
+ match HashType::preferences().iter()
+ .fold(None, |res, pref| {
+ res.or_else(|| if let Some(hash) = hashes.get(&pref) {
+ Some((pref, hash))
+ } else {
+ None
+ })
+ }) {
+ Some(pair) => Some(pair.clone()),
+ None => {
+ warn!("No suitable hash algorithms. Refusing to trust metadata: {:?}",
+ delegation.name);
+ continue
+ }
+ }
+ },
+ None => None,
+ };
+ (meta.version, meta.length, hash_data)
+ },
+ None => continue // TODO err msg
+ }
+ }
+ None => continue // TODO err msg
+ };
+
+ // TODO extract hash/len from snapshot and use in verification
+ if delegation.could_have_target(&self.target) {
+ match Tuf::get_metadata::<Targets,
+ TargetsMetadata,
+ File>(&self.tuf.remote.as_fetch(),
+ &self.tuf.http_client,
+ &Role::TargetsDelegation(delegation.name.clone()),
+ None,
+ delegation.threshold,
+ &delegation.key_ids,
+ &delegations.keys,
+ length,
+ hash_data.map(|(a, h)| (a, &*h.0)),
+ &mut None) {
+ Ok(meta) => {
+ if meta.version != version {
+ warn!("The metadata for {:?} had version {} but snapshot reported {}",
+ delegation.name, meta.version, version);
+ continue
+ }
+
+ let mut iter = TargetPathIterator::new(&self.tuf,
+ meta.clone(),
+ self.target);
+ let res = iter.next();
+ if delegation.terminating && res.is_none() {
+ return None
+ } else if res.is_some() {
+ self.sub_iter = Some(Box::new(iter));
+ return res
+ } else {
+ continue
+ }
+ }
+ Err(e) => warn!("Error fetching metadata: {:?}", e),
+ }
+ } else {
+ continue
+ }
+ }
+ return None
+ },
+ None => return None,
+ }
+ }
+ }
+ }
+}
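
Aside: `TargetPathIterator` is what lets the reworked `fetch_target` walk delegations. It yields each `TargetsMetadata` that lists the requested target, checking the current role's own `targets` map first and then descending, in order, into delegated roles whose `paths` could contain the target; a matching terminating delegation that cannot produce the target ends the search. A minimal sketch of that traversal order with simplified stand-in types (none of the snapshot, threshold, or key checks the real code performs, and reusing the prefix rule shown earlier):

use std::collections::HashMap;

// Illustrative stand-ins only; the real TargetsMetadata/DelegatedRole carry
// key IDs, thresholds, and snapshot-backed length/hash information.
#[derive(Clone)]
struct Role {
    targets: HashMap<String, String>,
    // (delegated role name, terminating, path patterns)
    delegations: Vec<(String, bool, Vec<String>)>,
}

// Pre-order search in the spirit of TargetPathIterator: a role that lists the
// target wins; otherwise its delegations are tried in order, and a terminating
// delegation that matched the path but produced nothing stops the search.
fn search(role: &Role, fetch: &dyn Fn(&str) -> Option<Role>, target: &str) -> Option<String> {
    if let Some(info) = role.targets.get(target) {
        return Some(info.clone());
    }
    for (name, terminating, patterns) in &role.delegations {
        let could_have = patterns.iter()
            .any(|p| p.as_str() == target || (p.ends_with('/') && target.starts_with(p.as_str())));
        if !could_have {
            continue;
        }
        let found = fetch(name.as_str()).and_then(|child| search(&child, fetch, target));
        if found.is_some() {
            return found;
        }
        if *terminating {
            return None;
        }
    }
    None
}

fn main() {
    let child = Role {
        targets: [("foo/bar.txt".to_string(), "length=42".to_string())].into_iter().collect(),
        delegations: Vec::new(),
    };
    let top = Role {
        targets: HashMap::new(),
        delegations: vec![("child".to_string(), false, vec!["foo/".to_string()])],
    };
    let fetch = move |name: &str| if name == "child" { Some(child.clone()) } else { None };
    assert_eq!(search(&top, &fetch, "foo/bar.txt"), Some("length=42".to_string()));
}
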
diff --git a/tests/tuf-test-vectors b/tests/tuf-test-vectors
index 2002c3f..fb35eeb 160000
--- a/tests/tuf-test-vectors
+++ b/tests/tuf-test-vectors
@@ -1 +1 @@
-Subproject commit 2002c3fdb63ff06b34192f807035e1ceb2adddeb
+Subproject commit fb35eeb1c8c07a76274111d49228acc4a04bef3c
diff --git a/tests/vectors.rs b/tests/vectors.rs
index 4766dcd..3640495 100644
--- a/tests/vectors.rs
+++ b/tests/vectors.rs
@@ -7,7 +7,7 @@
extern crate tuf;
use data_encoding::HEXLOWER;
-use std::fs::{self, File, DirBuilder};
+use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use tempdir::TempDir;
@@ -101,7 +101,6 @@
match (Tuf::from_root_keys(root_keys, config), &test_vector.error) {
(Ok(ref tuf), &None) => {
- assert_eq!(tuf.list_targets(), vec!["targets/file.txt".to_string()]);
// first time pulls remote
assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()), Ok(()));
assert!(temp_path.join("targets").join("targets").join("file.txt").exists());
@@ -111,12 +110,12 @@
(Ok(ref tuf), &Some(ref err)) if err == &"TargetHashMismatch".to_string() => {
assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
- Err(Error::TargetHashMismatch));
+ Err(Error::UnavailableTarget));
}
(Ok(ref tuf), &Some(ref err)) if err == &"OversizedTarget".to_string() => {
assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
- Err(Error::OversizedTarget));
+ Err(Error::UnavailableTarget));
}
(Err(Error::ExpiredMetadata(ref role)), &Some(ref err))
@@ -159,6 +158,22 @@
format!("Role: {}, err: {}", role, err))
}
+ (Err(Error::NonUniqueSignatures(ref role)), &Some(ref err)) if err.starts_with("NonUniqueSignatures::") => {
+ assert!(err.to_lowercase()
+ .ends_with(role.to_string().as_str()),
+ format!("Role: {}, err: {}", role, err))
+ }
+
+ (Ok(ref tuf), &Some(ref err)) if err == &"UnavailableTarget".to_string() => {
+ assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
+ Err(Error::UnavailableTarget));
+ }
+
+ (Ok(ref tuf), &Some(ref err))
+ if err == &"UnmetThreshold::Delegation".to_string() => {
+ assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()), Err(Error::UnavailableTarget));
+ }
+
x => panic!("Unexpected failures: {:?}", x),
}
}
@@ -173,25 +188,15 @@
run_test_vector("002")
}
-#[ignore]
-fn vector_003() {
- run_test_vector("003")
-}
-
-#[ignore]
-fn vector_004() {
- run_test_vector("004")
-}
+// TODO 003
+// TODO 004
#[test]
fn vector_005() {
run_test_vector("005")
}
-#[ignore]
-fn vector_006() {
- run_test_vector("006")
-}
+// TODO 006
#[test]
fn vector_007() {
@@ -293,15 +298,8 @@
run_test_vector("026")
}
-#[ignore]
-fn vector_027() {
- run_test_vector("027")
-}
-
-#[ignore]
-fn vector_028() {
- run_test_vector("028")
-}
+// TODO 027
+// TODO 028
#[test]
fn vector_029() {
@@ -332,3 +330,82 @@
fn vector_034() {
run_test_vector("034")
}
+
+// TODO 035
+// TODO 036
+
+#[test]
+fn vector_037() {
+ run_test_vector("037")
+}
+
+#[test]
+fn vector_038() {
+ run_test_vector("038")
+}
+
+#[test]
+fn vector_039() {
+ run_test_vector("039")
+}
+
+#[test]
+fn vector_040() {
+ run_test_vector("040")
+}
+
+// TODO 041
+// TODO 042
+// TODO 043
+// TODO 044
+
+#[test]
+fn vector_045() {
+ run_test_vector("045")
+}
+
+#[test]
+fn vector_046() {
+ run_test_vector("046")
+}
+
+#[test]
+fn vector_047() {
+ run_test_vector("047")
+}
+
+#[test]
+fn vector_048() {
+ run_test_vector("048")
+}
+
+#[test]
+fn vector_049() {
+ run_test_vector("049")
+}
+
+
+#[test]
+fn vector_050() {
+ run_test_vector("050")
+}
+
+#[test]
+fn vector_051() {
+ run_test_vector("051")
+}
+
+#[test]
+fn vector_052() {
+ run_test_vector("052")
+}
+
+#[test]
+fn vector_053() {
+ run_test_vector("053")
+}
+
+#[test]
+fn vector_054() {
+ run_test_vector("054")
+}