Merge branch 'develop'
diff --git a/.travis.yml b/.travis.yml
index cef597e..8a01660 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,11 +3,17 @@
dist: trust
language: rust
+cache: cargo
rust:
- stable
- beta
- nightly
+env:
+ global:
+ # for codecov.io
+ - RUSTFLAGS="-C link-dead-code"
+
install:
- sudo add-apt-repository -y ppa:fkrull/deadsnakes
- sudo apt-get update
@@ -22,3 +28,22 @@
- RUST_BACKTRACE=full cargo build --verbose --features=cli
- ./tests/tuf-test-vectors/server.py --path tuf &>/dev/null &
- RUST_BACKTRACE=full cargo test --verbose --features=cli
+
+after_failure:
+ - cat Cargo.lock
+
+after_success: |
+ sudo apt-get install -y libcurl4-openssl-dev libelf-dev libdw-dev cmake gcc binutils-dev &&
+ wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
+ tar xzf master.tar.gz &&
+ cd kcov-master &&
+ mkdir build &&
+ cd build &&
+ cmake .. &&
+ make &&
+ sudo make install &&
+ cd ../.. &&
+ rm -rf kcov-master &&
+ for file in target/debug/*-*[^\.d]; do mkdir -p "target/cov/$(basename $file)"; kcov --exclude-pattern=/.cargo,/usr/lib --verify "target/cov/$(basename $file)" "$file"; done &&
+ bash <(curl -s https://codecov.io/bash) &&
+ echo "Uploaded code coverage"
diff --git a/Cargo.toml b/Cargo.toml
index bf85e91..d1ffee2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tuf"
-version = "0.1.4"
+version = "0.1.5"
authors = [ "heartsucker <heartsucker@autistici.org>" ]
description = "Library for The Update Framework (TUF)"
homepage = "https://github.com/heartsucker/rust-tuf"
@@ -14,6 +14,7 @@
[badges]
travis-ci = { repository = "heartsucker/rust-tuf", branch = "master" }
appveyor = { repository = "heartsucker/rust-tuf", branch = "master", service = "github" }
+codecov = { repository = "heartsucker/rust-tuf", branch = "master", service = "github" }
[lib]
name = "tuf"
diff --git a/README.md b/README.md
index fb8f0da..4d6bf1a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# rust-tuf
-[![Travis build Status](https://travis-ci.org/heartsucker/rust-tuf.svg?branch=master)](https://travis-ci.org/heartsucker/rust-tuf) [![Appveyor build status](https://ci.appveyor.com/api/projects/status/kfyvpkdvn5ap7dqc?svg=true)](https://ci.appveyor.com/project/heartsucker/rust-tuf)
+[![Travis build Status](https://travis-ci.org/heartsucker/rust-tuf.svg?branch=master)](https://travis-ci.org/heartsucker/rust-tuf) [![Appveyor build status](https://ci.appveyor.com/api/projects/status/kfyvpkdvn5ap7dqc/branch/master?svg=true)](https://ci.appveyor.com/project/heartsucker/rust-tuf/branch/master)[![codecov](https://codecov.io/gh/heartsucker/rust-tuf/branch/master/graph/badge.svg)](https://codecov.io/gh/heartsucker/rust-tuf)
A Rust implementation of [The Update Framework (TUF)](https://theupdateframework.github.io/).
diff --git a/scripts/test-repo.sh b/scripts/test-repo.sh
new file mode 100755
index 0000000..953c5dc
--- /dev/null
+++ b/scripts/test-repo.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+set -ue
+
+# Runs the CLI tool against a repo and prints the state after.
+
+cd "$(dirname "$(readlink -f "$0")")/.."
+
+declare -r bin="target/debug/tuf"
+temp=$(mktemp -d)
+declare -r temp
+declare -r repo="tests/tuf-test-vectors/tuf/$1/repo"
+
+cargo build --features=cli
+
+set +e
+export RUST_LOG='debug'
+
+"$bin" -p "$temp" -f "$repo" init
+cp "$repo"/root.json "$temp/metadata/current"
+"$bin" -p "$temp" -f "$repo" update
+"$bin" -p "$temp" -f "$repo" fetch targets/file.txt
+"$bin" -p "$temp" -f "$repo" verify targets/file.txt
+
+tree "$temp"
diff --git a/src/http.rs b/src/http.rs
new file mode 100644
index 0000000..7a02af7
--- /dev/null
+++ b/src/http.rs
@@ -0,0 +1,15 @@
+use hyper::client::Client as HttpClient;
+use hyper::client::response::Response;
+use hyper::header::{Headers, UserAgent};
+use url::Url;
+
+use error::Error;
+use util;
+
+pub fn get(http_client: &HttpClient, url: &Url) -> Result<Response, Error> {
+ let mut headers = Headers::new();
+ headers.set(UserAgent(format!("rust-tuf/{}", env!("CARGO_PKG_VERSION"))));
+ let req = http_client.get(util::url_to_hyper_url(url)?)
+ .headers(headers);
+ Ok(req.send()?)
+}
diff --git a/src/lib.rs b/src/lib.rs
index 941b574..6b453cc 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -153,11 +153,14 @@
extern crate untrusted;
extern crate uuid;
+#[macro_use]
+mod util;
+
mod cjson;
+mod http;
mod metadata;
mod error;
mod tuf;
-mod util;
pub use tuf::*;
pub use error::*;
diff --git a/src/main.rs b/src/main.rs
index 13b3edc..7e09c25 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -12,8 +12,6 @@
use _tuf::{Tuf, Config, Error, RemoteRepo};
use url::Url;
-// TODO logging
-
fn main() {
let matches = parser().get_matches();
env_logger::init().unwrap();
@@ -33,6 +31,7 @@
.unwrap();
let config = Config::build().remote(remote)
.local_path(PathBuf::from(matches.value_of("path").unwrap()))
+ .init(false)
.finish()?;
if let Some(matches) = matches.subcommand_matches("fetch") {
diff --git a/src/tuf.rs b/src/tuf.rs
index fec0a1d..0227057 100644
--- a/src/tuf.rs
+++ b/src/tuf.rs
@@ -7,16 +7,17 @@
use std::collections::{HashMap, HashSet};
use std::fs::{self, File, DirBuilder};
use std::io::{Read, Write, Seek, SeekFrom};
-use std::path::PathBuf;
+use std::path::{PathBuf, Path};
use url::Url;
use uuid::Uuid;
use cjson;
use error::Error;
+use http;
use metadata::{Role, RoleType, Root, Targets, Timestamp, Snapshot, Metadata, SignedMetadata,
RootMetadata, TargetsMetadata, TimestampMetadata, SnapshotMetadata, HashType,
HashValue, KeyId, Key};
-use util;
+use util::{self, TempFile};
/// A remote TUF repository.
#[derive(Debug)]
@@ -65,6 +66,7 @@
&config.http_client,
&Role::Root,
Some(1),
+ true,
modified_root.root.threshold,
&modified_root.root.key_ids,
&modified_root.keys,
@@ -81,6 +83,7 @@
&config.http_client,
&Role::Root,
Some(1),
+ true,
modified_root.root.threshold,
&modified_root.root.key_ids,
&modified_root.keys,
@@ -114,11 +117,14 @@
let root = {
let fetch_type = &FetchType::Cache(config.local_path.clone());
- let root = Self::unverified_read_root(fetch_type, &config.http_client)?;
+ let (_, root) =
+ Self::unverified_read_root(fetch_type, &config.http_client, None)?;
+
Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
&config.http_client,
&Role::Root,
None,
+ true,
root.root.threshold,
&root.root.key_ids,
&root.keys,
@@ -162,12 +168,8 @@
// TODO clean function that cleans up local_path for old targets, old dirs, etc
- fn temp_file(&self) -> Result<(File, PathBuf), Error> {
- let uuid = Uuid::new_v4();
- let path = self.local_path.as_path().join("temp").join(uuid.hyphenated().to_string());
-
- debug!("Creating temp file: {:?}", path);
- Ok((File::create(path.clone())?, path.to_path_buf()))
+ fn temp_file(&self) -> Result<TempFile, Error> {
+ Ok(TempFile::new(self.local_path.join("temp"))?)
}
/// Update the metadata from local and remote sources.
@@ -210,86 +212,63 @@
fn update_root(&mut self, fetch_type: &FetchType) -> Result<(), Error> {
debug!("Updating root metadata");
- let temp_root = Self::unverified_read_root(fetch_type, &self.http_client)?;
+ let (_, temp_root) =
+ Self::unverified_read_root(fetch_type, &self.http_client, Some(self.local_path.as_path()))?;
+
+ // handle the edge case where we never enter the update loop
+ // AND the first piece of metadata is expired
+ if temp_root.version == 1 && self.root.expires() <= &UTC::now() {
+ return Err(Error::ExpiredMetadata(Role::Root));
+ }
// TODO reuse temp root as last one
for i in (self.root.version + 1)..(temp_root.version + 1) {
- let (mut out, out_path) = if !fetch_type.is_cache() {
- let (file, path) = self.temp_file()?;
- (Some(file), Some(path))
+ let mut temp_file = if !fetch_type.is_cache() {
+ Some(self.temp_file()?)
} else {
- (None, None)
+ None
};
- let root = match Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &self.http_client,
- &Role::Root,
- Some(i),
- self.root.root.threshold,
- &self.root.root.key_ids,
- &self.root.keys,
- None,
- None,
- &mut out) {
- Ok(root) => root,
- Err(e) => {
- match out_path {
- Some(out_path) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => (),
- Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
- }
- }
- None => (),
- }
- return Err(e);
- }
- };
+ let root = Self::get_metadata::<Root, RootMetadata, TempFile>(fetch_type,
+ &self.http_client,
+ &Role::Root,
+ Some(i),
+ true,
+ self.root.root.threshold,
+ &self.root.root.key_ids,
+ &self.root.keys,
+ None,
+ None,
+ &mut temp_file)?;
// verify root again against itself (for cross signing)
// TODO this is not the most efficient way to do it, but it works
- match Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &self.http_client,
- &Role::Root,
- Some(i),
- root.root.threshold,
- &root.root.key_ids,
- &root.keys,
- None,
- None,
- &mut None::<File>) {
- Ok(root_again) => {
- if root != root_again {
- // TODO better error message
- return Err(Error::Generic(format!("Cross singning of root version {} \
- failed",
- i)));
- }
- }
- Err(e) => {
- match out_path {
- Some(out_path) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => (),
- Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
- }
- }
- None => (),
- }
- return Err(e);
- }
- };
+ let root_again =
+ Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
+ &self.http_client,
+ &Role::Root,
+ Some(i),
+ false,
+ root.root.threshold,
+ &root.root.key_ids,
+ &root.keys,
+ None,
+ None,
+ &mut None::<File>)?;
+ if root != root_again {
+ // TODO better error message
+ return Err(Error::Generic(format!("Cross signing of root version {} failed", i)));
+ }
info!("Rotated to root metadata version {}", i);
self.root = root;
- match out_path {
- Some(out_path) => {
- fs::rename(out_path,
- self.local_path
- .join("metadata")
- .join("archive")
- .join(format!("{}.root.json", i)))?
+ match temp_file {
+ Some(temp_file) => {
+ temp_file.persist(&self.local_path
+ .join("metadata")
+ .join("archive")
+ .join(format!("{}.root.json", i)))?
}
None => (),
};
@@ -308,32 +287,27 @@
fn update_timestamp(&mut self, fetch_type: &FetchType) -> Result<bool, Error> {
debug!("Updating timestamp metadata");
- let (mut out, out_path) = if !fetch_type.is_cache() {
- let (file, path) = self.temp_file()?;
- (Some(file), Some(path))
+ let mut temp_file = if !fetch_type.is_cache() {
+ Some(self.temp_file()?)
} else {
- (None, None)
+ None
};
let timestamp =
- Self::get_metadata::<Timestamp, TimestampMetadata, File>(fetch_type,
+ Self::get_metadata::<Timestamp, TimestampMetadata, TempFile>(fetch_type,
&self.http_client,
&Role::Timestamp,
None,
+ false,
self.root.timestamp.threshold,
&self.root.timestamp.key_ids,
&self.root.keys,
None,
None,
- &mut out)?;
+ &mut temp_file)?;
match self.timestamp {
Some(ref t) if t.version > timestamp.version => {
- match out_path {
- Some(out_path) => fs::remove_file(out_path)?,
- None => (),
- };
-
return Err(Error::VersionDecrease(Role::Timestamp));
}
Some(ref t) if t.version == timestamp.version => return Ok(false),
@@ -344,24 +318,13 @@
if let Some(ref timestamp_meta) = timestamp.meta.get("snapshot.json") {
if timestamp_meta.version > timestamp.version {
info!("Timestamp metadata is up to date");
-
- match out_path {
- Some(out_path) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => (),
- Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
- }
- }
- None => (),
- };
-
return Ok(false);
}
}
}
- match out_path {
- Some(out_path) => {
+ match temp_file {
+ Some(temp_file) => {
let current_path = self.local_path
.join("metadata")
.join("current")
@@ -375,7 +338,7 @@
.join("timestamp.json"))?;
};
- fs::rename(out_path, current_path)?
+ temp_file.persist(&current_path)?
}
None => (),
};
@@ -410,34 +373,30 @@
})
.ok_or_else(|| Error::NoSupportedHashAlgorithms)?;
- let (mut out, out_path) = if !fetch_type.is_cache() {
- let (file, path) = self.temp_file()?;
- (Some(file), Some(path))
+ let mut temp_file = if !fetch_type.is_cache() {
+ Some(self.temp_file()?)
} else {
- (None, None)
+ None
};
let snapshot = Self::get_metadata::<Snapshot,
SnapshotMetadata,
- File>(fetch_type,
+ TempFile>(fetch_type,
&self.http_client,
&Role::Snapshot,
None,
+ false,
self.root.snapshot.threshold,
&self.root.snapshot.key_ids,
&self.root.keys,
Some(meta.length),
Some((&hash_alg, &expected_hash.0)),
- &mut out)?;
+ &mut temp_file)?;
// TODO ? check downloaded version matches what was in the timestamp.json
match self.snapshot {
Some(ref s) if s.version > snapshot.version => {
- match out_path {
- Some(out_path) => fs::remove_file(out_path)?,
- None => (),
- };
return Err(Error::VersionDecrease(Role::Snapshot));
}
Some(ref s) if s.version == snapshot.version => return Ok(false),
@@ -450,19 +409,14 @@
if let Some(ref targets) = self.targets {
if snapshot_meta.version > targets.version {
info!("Snapshot metadata is up to date");
-
- match out_path {
- Some(out_path) => fs::remove_file(out_path)?,
- None => (),
- };
return Ok(false);
}
}
}
}
- match out_path {
- Some(out_path) => {
+ match temp_file {
+ Some(temp_file) => {
let current_path = self.local_path
.join("metadata")
.join("current")
@@ -476,7 +430,7 @@
.join("snapshot.json"))?;
};
- fs::rename(out_path, current_path)?
+ temp_file.persist(&current_path)?
}
None => (),
};
@@ -518,41 +472,36 @@
let hash_data = hash_data.map(|(t, v)| (t, v.0.as_slice()));
- let (mut out, out_path) = if !fetch_type.is_cache() {
- let (file, path) = self.temp_file()?;
- (Some(file), Some(path))
+ let mut temp_file = if !fetch_type.is_cache() {
+ Some(self.temp_file()?)
} else {
- (None, None)
+ None
};
- let targets = Self::get_metadata::<Targets, TargetsMetadata, File>(fetch_type,
+ let targets = Self::get_metadata::<Targets, TargetsMetadata, TempFile>(fetch_type,
&self.http_client,
&Role::Targets,
None,
+ false,
self.root.targets.threshold,
&self.root.targets.key_ids,
&self.root.keys,
meta.length,
hash_data,
- &mut out)?;
+ &mut temp_file)?;
// TODO ? check downloaded version matches what was in the snapshot.json
match self.targets {
Some(ref t) if t.version > targets.version => {
- match out_path {
- Some(out_path) => fs::remove_file(out_path)?,
- None => (),
- };
-
return Err(Error::VersionDecrease(Role::Targets));
}
Some(ref t) if t.version == targets.version => return Ok(()),
_ => self.targets = Some(targets),
}
- match out_path {
- Some(out_path) => {
+ match temp_file {
+ Some(temp_file) => {
let current_path = self.local_path
.join("metadata")
.join("current")
@@ -566,7 +515,7 @@
.join("targets.json"))?;
};
- fs::rename(out_path, current_path)?
+ temp_file.persist(&current_path)?
}
None => (),
};
@@ -575,17 +524,18 @@
}
fn get_metadata<R: RoleType, M: Metadata<R>, W: Write>(fetch_type: &FetchType,
- http_client: &Client,
- role: &Role,
- metadata_version: Option<i32>,
- threshold: i32,
- trusted_ids: &[KeyId],
- available_keys: &HashMap<KeyId, Key>,
- size: Option<i64>,
- hash_data: Option<(&HashType,
- &[u8])>,
- mut out: &mut Option<W>)
- -> Result<M, Error> {
+ http_client: &Client,
+ role: &Role,
+ metadata_version: Option<i32>,
+ allow_expired: bool,
+ threshold: i32,
+ trusted_ids: &[KeyId],
+ available_keys: &HashMap<KeyId, Key>,
+ size: Option<i64>,
+ hash_data: Option<(&HashType,
+ &[u8])>,
+ mut out: &mut Option<W>)
+ -> Result<M, Error> {
debug!("Loading metadata from {:?}", fetch_type);
let metadata_version_str = metadata_version.map(|x| format!("{}.", x))
@@ -629,7 +579,7 @@
.map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
.push(&format!("{}{}.json", metadata_version_str, role));
}
- let mut resp = http_client.get(url).send()?;
+ let mut resp = http::get(http_client, &url)?;
let mut buf = Vec::new();
match (size, hash_data) {
@@ -645,9 +595,7 @@
let safe_bytes = Self::verify_meta::<R>(signed, role, threshold, trusted_ids, available_keys)?;
let meta: M = json::from_slice(&safe_bytes)?;
- // TODO this will be a problem with updating root metadata and this function probably
- // needs an arg like `allow_expired`.
- if meta.expires() <= &UTC::now() {
+ if !allow_expired && meta.expires() <= &UTC::now() {
return Err(Error::ExpiredMetadata(role.clone()));
}
@@ -660,9 +608,10 @@
}
fn unverified_read_root(fetch_type: &FetchType,
- http_client: &Client)
- -> Result<RootMetadata, Error> {
- let buf: Vec<u8> = match fetch_type {
+ http_client: &Client,
+ local_path: Option<&Path>)
+ -> Result<(Option<TempFile>, RootMetadata), Error> {
+ let (temp_file, buf): (Option<TempFile>, Vec<u8>) = match fetch_type {
&FetchType::Cache(ref local_path) => {
let path = local_path.join("metadata")
.join("current")
@@ -670,32 +619,58 @@
let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
let mut buf = Vec::new();
file.read_to_end(&mut buf).map(|_| ())?;
- buf
+ (None, buf)
}
&FetchType::File(ref path) => {
- let path = path.join("root.json");
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
+ let local_path = local_path.ok_or_else(|| {
+ let msg = "Programming error. No local path supplied for remote file read";
+ error!("{}", msg);
+ Error::Generic(msg.to_string())
+ })?;
+ let dest_path = local_path.join("temp")
+ .join(Uuid::new_v4().hyphenated().to_string());
+
+ let src_path = path.join("root.json");
+ fs::copy(src_path, dest_path.clone())?;
+
+ let mut temp_file = TempFile::from_existing(dest_path)
+ .map_err(|e| Error::from_io(e, &path))?;
let mut buf = Vec::new();
- file.read_to_end(&mut buf).map(|_| ())?;
- buf
+ temp_file.read_to_end(&mut buf).map(|_| ())?;
+ temp_file.seek(SeekFrom::Start(0))
+ .map_err(|e| Error::from_io(e, &path))?;
+
+ (Some(temp_file), buf)
}
&FetchType::Http(ref url) => {
+ let local_path = local_path.ok_or_else(|| {
+ let msg = "Programming error. No local path supplied for remote HTTP read";
+ error!("{}", msg);
+ Error::Generic(msg.to_string())
+ })?;
+
+ let mut temp_file = TempFile::new(local_path.to_path_buf())?;
+
let mut url = url.clone();
{
url.path_segments_mut()
.map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
.push("root.json");
}
- let mut resp = http_client.get(url).send()?;
+ let mut resp = http::get(http_client, &url)?;
let mut buf = Vec::new();
resp.read_to_end(&mut buf).map(|_| ())?;
- buf
+
+ temp_file.write_all(&buf).map(|_| ())?;
+ temp_file.seek(SeekFrom::Start(0))?;
+
+ (Some(temp_file), buf)
}
};
let signed: SignedMetadata<Root> = json::from_slice(&buf)?;
let root_str = signed.signed.to_string();
- Ok(json::from_str(&root_str)?)
+ Ok((temp_file, json::from_str(&root_str)?))
}
/// Read the root.json metadata and replace keys for the root role with the keys that are given
@@ -731,7 +706,7 @@
.map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
.push("1.root.json");
}
- let mut resp = http_client.get(url).send()?;
+ let mut resp = http::get(http_client, &url)?;
let mut buf = Vec::new();
resp.read_to_end(&mut buf).map(|_| ())?;
buf
@@ -851,7 +826,7 @@
let _ = file.seek(SeekFrom::Start(0))?;
return Ok(path);
} else {
- let (out, out_path) = self.temp_file()?;
+ let mut temp_file = self.temp_file()?;
match self.remote {
RemoteRepo::File(ref path) => {
@@ -860,7 +835,7 @@
let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
match Self::read_and_verify(&mut file,
- &mut Some(out),
+ &mut Some(temp_file.file_mut()?),
Some(target_meta.length),
Some((&hash_alg, &expected_hash.0))) {
Ok(()) => {
@@ -876,15 +851,10 @@
.create(parent)?;
}
- fs::rename(out_path, storage_path.clone())?;
+ temp_file.persist(&storage_path)?;
return Ok(storage_path)
}
- Err(e) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => warn!("Error verifying target: {:?}", e),
- Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
- }
- }
+ Err(e) => warn!("Error verifying target: {:?}", e),
}
}
RemoteRepo::Http(ref url) => {
@@ -895,10 +865,10 @@
.extend(util::url_path_to_path_components(&target)?);
}
let url = util::url_to_hyper_url(&url)?;
- let mut resp = self.http_client.get(url).send()?;
+ let mut resp = http::get(&self.http_client, &url)?;
match Self::read_and_verify(&mut resp,
- &mut Some(out),
+ &mut Some(temp_file.file_mut()?),
Some(target_meta.length),
Some((&hash_alg, &expected_hash.0))) {
Ok(()) => {
@@ -915,16 +885,11 @@
.create(parent)?;
}
- fs::rename(out_path, storage_path.clone())?;
+ temp_file.persist(&storage_path)?;
return Ok(storage_path)
}
- Err(e) => {
- match fs::remove_file(out_path.clone()) {
- Ok(_) => warn!("Error verifying target: {:?}", e),
- Err(e) => warn!("Error removing temp file {:?}: {}", out_path, e),
- }
- }
+ Err(e) => warn!("Error verifying target: {:?}", e),
}
}
}
@@ -1189,6 +1154,7 @@
&self.tuf.http_client,
&Role::TargetsDelegation(delegation.name.clone()),
None,
+ false,
delegation.threshold,
&delegation.key_ids,
&delegations.keys,
diff --git a/src/util.rs b/src/util.rs
index 67d561d..85991a5 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -1,7 +1,10 @@
use hyper;
+use std::fs::{self, File};
+use std::io::{self, Read, Write, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use url::Url;
use url::percent_encoding::percent_decode;
+use uuid::Uuid;
use error::Error;
@@ -38,6 +41,85 @@
Ok(hyper::Url::parse(url.as_str())?)
}
+
+#[derive(Debug)]
+struct TempFileInner {
+ path: PathBuf,
+ file: File,
+}
+
+#[derive(Debug)]
+pub struct TempFile(Option<TempFileInner>);
+
+impl TempFile {
+ pub fn new(prefix: PathBuf) -> Result<Self, io::Error> {
+ let path = prefix.join(Uuid::new_v4().hyphenated().to_string());
+ Ok(TempFile(Some(TempFileInner {
+ path: path.clone(),
+ file: File::create(path)?,
+ })))
+ }
+
+ pub fn from_existing(path: PathBuf) -> Result<Self, io::Error> {
+ Ok(TempFile(Some( TempFileInner {
+ path: path.clone(),
+ file: File::open(path)?,
+ })))
+ }
+
+ pub fn file_mut(&mut self) -> Result<&mut File, io::Error> {
+ match self.0 {
+ Some(ref mut inner) => Ok(&mut inner.file),
+ None => Err(io::Error::new(io::ErrorKind::Other, "invalid TempFile reference"))
+ }
+ }
+
+ pub fn persist(mut self, dest: &Path) -> Result<(), io::Error> {
+ match self.0.take() {
+ Some(inner) => fs::rename(inner.path, dest),
+ None => Err(io::Error::new(io::ErrorKind::Other, "invalid TempFile reference")),
+ }
+ }
+}
+
+impl Write for TempFile {
+ fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
+ self.file_mut()?.write(buf)
+ }
+
+ fn flush(&mut self) -> Result<(), io::Error> {
+ self.file_mut()?.flush()
+ }
+}
+
+impl Read for TempFile {
+ fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
+ self.file_mut()?.read(buf)
+ }
+}
+
+impl Seek for TempFile {
+ fn seek(&mut self, pos: SeekFrom) -> Result<u64, io::Error> {
+ self.file_mut()?.seek(pos)
+ }
+}
+
+impl Drop for TempFile {
+ fn drop(&mut self) {
+ match self.0.take() {
+ Some(inner) => {
+ drop(inner.file);
+ match fs::remove_file(inner.path) {
+ Ok(()) => (),
+ Err(e) => warn!("Failed to delete tempfile: {:?}", e),
+ }
+ },
+ None => (),
+ }
+ }
+}
+
+
#[cfg(test)]
mod test {
use super::*;
diff --git a/tests/tuf-test-vectors b/tests/tuf-test-vectors
index fb35eeb..6e17614 160000
--- a/tests/tuf-test-vectors
+++ b/tests/tuf-test-vectors
@@ -1 +1 @@
-Subproject commit fb35eeb1c8c07a76274111d49228acc4a04bef3c
+Subproject commit 6e176149be1b0b05df567658173d8149aa1df57e
diff --git a/tests/vectors.rs b/tests/vectors.rs
index 0d17c5c..5acc59e 100644
--- a/tests/vectors.rs
+++ b/tests/vectors.rs
@@ -8,9 +8,9 @@
extern crate url;
use data_encoding::HEXLOWER;
-use std::fs::File;
-use std::io::Read;
-use std::path::PathBuf;
+use std::fs::{self, File, DirEntry};
+use std::io::{self, Read};
+use std::path::{PathBuf, Path};
use tempdir::TempDir;
use tuf::{Tuf, Config, Error, RemoteRepo};
use tuf::meta::{Key, KeyValue, KeyType};
@@ -38,6 +38,7 @@
struct VectorMetaEntry {
repo: String,
error: Option<String>,
+ is_success: bool,
root_keys: Vec<RootKeyData>,
}
@@ -53,6 +54,20 @@
Http
}
+fn ensure_empty(path: &Path) {
+ if !path.is_dir() {
+ panic!("Path wasn't a dir: {:?}", path)
+ }
+
+ let res = fs::read_dir(path).expect("couldn't read dir").collect::<Vec<io::Result<DirEntry>>>();
+ if !res.is_empty() {
+ panic!("Temp dir not empty: {:?}", res)
+ }
+ if !res.iter().all(|x| x.is_ok()) {
+ panic!("Temp dir errors: {:?}", res)
+ }
+}
+
fn run_test_vector(test_path: &str, test_type: TestType) {
let temp_dir = TempDir::new("rust-tuf").expect("couldn't make temp dir");
let temp_path = temp_dir.into_path();
@@ -187,6 +202,10 @@
x => panic!("Unexpected failures: {:?}", x),
}
+ ensure_empty(&temp_path.join("temp"));
+ if !test_vector.is_success {
+ ensure_empty(&temp_path.join("targets"))
+ }
}
@@ -264,3 +283,4 @@
test_cases!("052", _052);
test_cases!("053", _053);
test_cases!("054", _054);
+test_cases!("055", _055);