Merge branch 'develop'
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 6ca8ebc..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "tests/tuf-test-vectors"]
- path = tests/tuf-test-vectors
- url = https://github.com/heartsucker/tuf-test-vectors.git
diff --git a/.travis.yml b/.travis.yml
index 8a01660..83392c2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,20 +14,10 @@
# for codecov.io
- RUSTFLAGS="-C link-dead-code"
-install:
- - sudo add-apt-repository -y ppa:fkrull/deadsnakes
- - sudo apt-get update
- - sudo apt-get install -y python3.5 python3.5-dev
- - sudo ln -sf /usr/bin/python3.5 /usr/bin/python3
- - wget https://bootstrap.pypa.io/get-pip.py
- - sudo python3 get-pip.py
- - sudo pip3 install virtualenv
- - cd tests/tuf-test-vectors && make init && cd ../../
-
script:
- - RUST_BACKTRACE=full cargo build --verbose --features=cli
+ - RUST_BACKTRACE=full cargo build --verbose
- ./tests/tuf-test-vectors/server.py --path tuf &>/dev/null &
- - RUST_BACKTRACE=full cargo test --verbose --features=cli
+ - RUST_BACKTRACE=full cargo test --verbose
after_failure:
- cat Cargo.lock
diff --git a/Cargo.toml b/Cargo.toml
index 1a0bbb9..84565f6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,13 +1,13 @@
[package]
name = "tuf"
-version = "0.1.6"
+version = "0.2.0"
authors = [ "heartsucker <heartsucker@autistici.org>" ]
description = "Library for The Update Framework (TUF)"
homepage = "https://github.com/heartsucker/rust-tuf"
repository = "https://github.com/heartsucker/rust-tuf"
documentation = "https://docs.rs/tuf"
readme = "README.md"
-license = "MIT"
+license = "MIT/Apache-2.0"
keywords = [ "security" ]
categories = [ "cryptography" ]
@@ -20,31 +20,23 @@
name = "tuf"
path = "./src/lib.rs"
-[[bin]]
-name = "tuf"
-path = "./src/main.rs"
-doc = false
-required-features = [ "cli" ]
-
-[features]
-cli = [ "clap" ]
-
[dependencies]
-chrono = { version = "0.3.1", features = [ "serde" ] }
-clap = { version = "2.23", optional = true }
+chrono = { version = "0.4", features = [ "serde" ] }
data-encoding = "2.0.0-rc.1"
+derp = "0.0.4"
env_logger = "0.4.3"
hyper = "0.10.10"
itoa = "0.3"
log = "0.3"
-pem = "0.4"
-ring = "0.9.4"
+ring = { version = "0.9.4", features = [ "rsa_signing" ] }
serde = "1"
serde_derive = "1"
serde_json = "1"
+tempfile = "2.1.5"
url = "1.4"
untrusted = "0.5"
uuid = { version = "0.5", features = [ "v4" ] }
[dev-dependencies]
+maplit = "0.1.4"
tempdir = "0.3"
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..e646da5
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,203 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+Copyright (c) 2017 heartsucker, Advanced Telematic Systems GmbH
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
diff --git a/LICENSE b/LICENSE-MIT
similarity index 93%
rename from LICENSE
rename to LICENSE-MIT
index 1512693..045d2f0 100644
--- a/LICENSE
+++ b/LICENSE-MIT
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2017 heartsucker
+Copyright (c) 2017 heartsucker, Advanced Telematic Systems GmbH
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
diff --git a/Makefile b/Makefile
index 09f45e0..c71954d 100644
--- a/Makefile
+++ b/Makefile
@@ -1,8 +1,11 @@
-.PHONY: help dev-docs
+.PHONY: help clean dev-docs
.DEFAULT_GOAL := help
+clean: ## Remove temp/useless files
+ @find . -name '*.rs.bk' -type f -delete
+
dev-docs: ## Generate the documentation for all modules (dev friendly)
- @cargo rustdoc --all-features -- --no-defaults --passes "collapse-docs" --passes "unindent-comments"
+ @cargo rustdoc --all-features --open -- --no-defaults --passes "collapse-docs" --passes "unindent-comments"
help: ## Print this message
@awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%16s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST)
diff --git a/README.md b/README.md
index 79f836b..f7e65af 100644
--- a/README.md
+++ b/README.md
@@ -15,13 +15,6 @@
Please make all pull requests to the `develop` branch.
-### Testing
-
-`rust-tuf` uses [`tuf-test-vectors`](https://github.com/heartsucker/tuf-test-vectors)
-to generate integration tests. When adding a complicated feature it may be
-necessary for you to make a separate pull request to that repository to ensure
-the required behaviors are sufficiently tested.
-
### Bugs
This project has a **full disclosure** policy on security related errors. Please
@@ -32,7 +25,8 @@
### License
-This work is licensed under the MIT license. See [LICENSE](./LICENSE) for details.
+This work is dual licensed under the MIT and Apache-2.0 licenses.
+See [LICENSE-MIT](./LICENSE-MIT) and [LICENSE-APACHE](./LICENSE-APACHE) for details.
### Cryptography Notice
diff --git a/appveyor.yml b/appveyor.yml
index b98fc75..b146c3a 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -49,10 +49,9 @@
- rustup.exe default %RUST_VERSION%
- rustc -V
- cargo -V
- - git submodule update --init --recursive
- - cargo build --verbose --features=cli --target %TARGET%
+ - cargo build --verbose --target %TARGET%
- SET RUST_BACKTRACE=full
- - cargo test --verbose --features=cli --target %TARGET%
+ - cargo test --verbose --target %TARGET%
# TODO on failure cat Cargo.lock
diff --git a/scripts/test-repo.sh b/scripts/test-repo.sh
deleted file mode 100755
index 953c5dc..0000000
--- a/scripts/test-repo.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-set -ue
-
-'Runs the CLI tool against a repo and prints the state after.'
-
-cd "$(dirname "$(readlink -f "$0")")/.."
-
-declare -r bin="target/debug/tuf"
-temp=$(mktemp -d)
-declare -r temp
-declare -r repo="tests/tuf-test-vectors/tuf/$1/repo"
-
-cargo build --features=cli
-
-set +e
-export RUST_LOG='debug'
-
-"$bin" -p "$temp" -f "$repo" init
-cp "$repo"/root.json "$temp/metadata/current"
-"$bin" -p "$temp" -f "$repo" update
-"$bin" -p "$temp" -f "$repo" fetch targets/file.txt
-"$bin" -p "$temp" -f "$repo" verify targets/file.txt
-
-tree "$temp"
diff --git a/src/client.rs b/src/client.rs
new file mode 100644
index 0000000..ccba448
--- /dev/null
+++ b/src/client.rs
@@ -0,0 +1,316 @@
+//! Clients for high level interactions with TUF repositories.
+
+use Result;
+use error::Error;
+use interchange::DataInterchange;
+use metadata::{MetadataVersion, RootMetadata, Role, MetadataPath, TargetPath};
+use repository::Repository;
+use tuf::Tuf;
+
+/// A client that interacts with TUF repositories.
+pub struct Client<D, L, R>
+where
+ D: DataInterchange,
+ L: Repository<D>,
+ R: Repository<D>,
+{
+ tuf: Tuf<D>,
+ config: Config,
+ local: L,
+ remote: R,
+}
+
+impl<D, L, R> Client<D, L, R>
+where
+ D: DataInterchange,
+ L: Repository<D>,
+ R: Repository<D>,
+{
+ /// Create a new TUF client from the given `Tuf` (metadata storage) and local and remote
+ /// repositories.
+ pub fn new(tuf: Tuf<D>, config: Config, mut local: L, mut remote: R) -> Result<Self> {
+ local.initialize()?;
+ remote.initialize()?;
+
+ Ok(Client {
+ tuf: tuf,
+ config: config,
+ local: local,
+ remote: remote,
+ })
+ }
+
+ /// Update TUF metadata from the local repository.
+ ///
+ /// Returns `true` if an update occurred and `false` otherwise.
+ pub fn update_local(&mut self) -> Result<bool> {
+ let r = Self::update_root(&mut self.tuf, &mut self.local, &self.config.max_root_size)?;
+ let ts = match Self::update_timestamp(
+ &mut self.tuf,
+ &mut self.local,
+ &self.config.max_timestamp_size,
+ ) {
+ Ok(b) => b,
+ Err(e) => {
+ warn!(
+ "Error updating timestamp metadata from local sources: {:?}",
+ e
+ );
+ false
+ }
+ };
+ let sn = match Self::update_snapshot(&mut self.tuf, &mut self.local) {
+ Ok(b) => b,
+ Err(e) => {
+ warn!(
+ "Error updating snapshot metadata from local sources: {:?}",
+ e
+ );
+ false
+ }
+ };
+ let ta = match Self::update_targets(&mut self.tuf, &mut self.local) {
+ Ok(b) => b,
+ Err(e) => {
+ warn!(
+ "Error updating targets metadata from local sources: {:?}",
+ e
+ );
+ false
+ }
+ };
+
+ Ok(r || ts || sn || ta)
+ }
+
+ /// Update TUF metadata from the remote repository.
+ ///
+ /// Returns `true` if an update occurred and `false` otherwise.
+ pub fn update_remote(&mut self) -> Result<bool> {
+ let r = Self::update_root(&mut self.tuf, &mut self.remote, &self.config.max_root_size)?;
+ let ts = Self::update_timestamp(
+ &mut self.tuf,
+ &mut self.remote,
+ &self.config.max_timestamp_size,
+ )?;
+ let sn = Self::update_snapshot(&mut self.tuf, &mut self.remote)?;
+ let ta = Self::update_targets(&mut self.tuf, &mut self.remote)?;
+
+ Ok(r || ts || sn || ta)
+ }
+
+ /// Returns `true` if an update occurred and `false` otherwise.
+ fn update_root<T>(tuf: &mut Tuf<D>, repo: &mut T, max_root_size: &Option<usize>) -> Result<bool>
+ where
+ T: Repository<D>,
+ {
+ let latest_root = repo.fetch_metadata(
+ &Role::Root,
+ &MetadataPath::from_role(&Role::Root),
+ &MetadataVersion::None,
+ max_root_size,
+ None,
+ )?;
+ let latest_version = D::deserialize::<RootMetadata>(latest_root.signed())?
+ .version();
+
+ if latest_version < tuf.root().version() {
+ return Err(Error::VerificationFailure(format!(
+ "Latest root version is lower than current root version: {} < {}",
+ latest_version,
+ tuf.root().version()
+ )));
+ } else if latest_version == tuf.root().version() {
+ return Ok(false);
+ }
+
+ let err_msg = "TUF claimed no update occurred when one should have. \
+ This is a programming error. Please report this as a bug.";
+
+ for i in (tuf.root().version() + 1)..latest_version {
+ let signed = repo.fetch_metadata(
+ &Role::Root,
+ &MetadataPath::from_role(&Role::Root),
+ &MetadataVersion::Number(i),
+ max_root_size,
+ None,
+ )?;
+ if !tuf.update_root(signed)? {
+ error!("{}", err_msg);
+ return Err(Error::Programming(err_msg.into()));
+ }
+ }
+
+ if !tuf.update_root(latest_root)? {
+ error!("{}", err_msg);
+ return Err(Error::Programming(err_msg.into()));
+ }
+ Ok(true)
+ }
+
+ /// Returns `true` if an update occurred and `false` otherwise.
+ fn update_timestamp<T>(
+ tuf: &mut Tuf<D>,
+ repo: &mut T,
+ max_timestamp_size: &Option<usize>,
+ ) -> Result<bool>
+ where
+ T: Repository<D>,
+ {
+ let ts = repo.fetch_metadata(
+ &Role::Timestamp,
+ &MetadataPath::from_role(&Role::Timestamp),
+ &MetadataVersion::None,
+ max_timestamp_size,
+ None,
+ )?;
+ tuf.update_timestamp(ts)
+ }
+
+ /// Returns `true` if an update occurred and `false` otherwise.
+ fn update_snapshot<T>(tuf: &mut Tuf<D>, repo: &mut T) -> Result<bool>
+ where
+ T: Repository<D>,
+ {
+ let snapshot_description = match tuf.timestamp() {
+ Some(ts) => {
+ match ts.meta().get(&MetadataPath::from_role(&Role::Snapshot)) {
+ Some(d) => Ok(d),
+ None => Err(Error::VerificationFailure(
+ "Timestamp metadata did not contain a description of the \
+ current snapshot metadata."
+ .into(),
+ )),
+ }
+ }
+ None => Err(Error::MissingMetadata(Role::Timestamp)),
+ }?
+ .clone();
+
+ if snapshot_description.version() <= tuf.snapshot().map(|s| s.version()).unwrap_or(0) {
+ return Ok(false);
+ }
+
+ let snap = repo.fetch_metadata(
+ &Role::Snapshot,
+ &MetadataPath::from_role(&Role::Snapshot),
+ &MetadataVersion::None,
+ &None,
+ None,
+ )?;
+ tuf.update_snapshot(snap)
+ }
+
+ /// Returns `true` if an update occurred and `false` otherwise.
+ fn update_targets<T>(tuf: &mut Tuf<D>, repo: &mut T) -> Result<bool>
+ where
+ T: Repository<D>,
+ {
+ let targets_description = match tuf.snapshot() {
+ Some(sn) => {
+ match sn.meta().get(&MetadataPath::from_role(&Role::Targets)) {
+ Some(d) => Ok(d),
+ None => Err(Error::VerificationFailure(
+ "Snapshot metadata did not contain a description of the \
+ current targets metadata."
+ .into(),
+ )),
+ }
+ }
+ None => Err(Error::MissingMetadata(Role::Snapshot)),
+ }?
+ .clone();
+
+ if targets_description.version() <= tuf.targets().map(|t| t.version()).unwrap_or(0) {
+ return Ok(false);
+ }
+
+ let targets = repo.fetch_metadata(
+ &Role::Targets,
+ &MetadataPath::from_role(&Role::Targets),
+ &MetadataVersion::None,
+ &None,
+ None,
+ )?;
+ tuf.update_targets(targets)
+ }
+
+ /// Fetch a target from the remote repo and write it to the local repo.
+ pub fn fetch_target(&mut self, target: &TargetPath) -> Result<()> {
+ let target_description = self.tuf.target_description(target)?;
+ let read = self.remote.fetch_target(target)?;
+ self.local.store_target(read, target, target_description)
+ }
+}
+
+/// Configuration for a TUF `Client`.
+#[derive(Debug)]
+pub struct Config {
+ max_root_size: Option<usize>,
+ max_timestamp_size: Option<usize>,
+}
+
+impl Config {
+ /// Initialize a `ConfigBuilder` with the default values.
+ pub fn build() -> ConfigBuilder {
+ ConfigBuilder::default()
+ }
+
+ /// Return the optional maximum root metadata size.
+ pub fn max_root_size(&self) -> &Option<usize> {
+ &self.max_root_size
+ }
+
+ /// Return the optional maximum timestamp metadata size.
+ pub fn max_timestamp_size(&self) -> &Option<usize> {
+ &self.max_timestamp_size
+ }
+}
+
+/// Helper for building and validating a TUF `Config`.
+#[derive(Debug, PartialEq)]
+pub struct ConfigBuilder {
+ max_root_size: Option<usize>,
+ max_timestamp_size: Option<usize>,
+}
+
+impl ConfigBuilder {
+ /// Validate this builder, returning a `Config` if validation succeeds.
+ pub fn finish(self) -> Result<Config> {
+ Ok(Config {
+ max_root_size: self.max_root_size,
+ max_timestamp_size: self.max_timestamp_size,
+ })
+ }
+
+ /// Set the optional maximum download size for root metadata.
+ pub fn max_root_size(mut self, max: Option<usize>) -> Self {
+ self.max_root_size = max;
+ self
+ }
+
+ /// Set the optional maximum download size for timestamp metadata.
+ pub fn max_timestamp_size(mut self, max: Option<usize>) -> Self {
+ self.max_timestamp_size = max;
+ self
+ }
+}
+
+impl Default for ConfigBuilder {
+ /// ```
+ /// use tuf::client::ConfigBuilder;
+ ///
+ /// let default = ConfigBuilder::default();
+ /// let config = ConfigBuilder::default()
+ /// .max_root_size(Some(1024 * 1024))
+ /// .max_timestamp_size(Some(32 * 1024));
+ /// assert_eq!(config, default);
+ /// assert!(default.finish().is_ok())
+ /// ```
+ fn default() -> Self {
+ ConfigBuilder {
+ max_root_size: Some(1024 * 1024),
+ max_timestamp_size: Some(32 * 1024),
+ }
+ }
+}
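Editor's note: taken together, the new `Client` API drives a full update cycle — trust cached metadata first, then the remote repository, then fetch targets. A minimal sketch of that flow, generic over any `Repository` implementations; the `sync` helper and the exact module paths are illustrative, not part of this diff:

```rust
use tuf::Result;
use tuf::client::{Client, Config};
use tuf::interchange::DataInterchange;
use tuf::metadata::TargetPath;
use tuf::repository::Repository;
use tuf::tuf::Tuf;

/// Hypothetical helper: `tuf`, both repositories, and `target` are assumed
/// to have been constructed and verified elsewhere.
fn sync<D, L, R>(tuf: Tuf<D>, local: L, remote: R, target: &TargetPath) -> Result<()>
where
    D: DataInterchange,
    L: Repository<D>,
    R: Repository<D>,
{
    let config = Config::build().finish()?;
    let mut client = Client::new(tuf, config, local, remote)?;

    // Prefer cached metadata, then hit the network, then fetch the target.
    let _ = client.update_local()?;
    let _ = client.update_remote()?;
    client.fetch_target(target)
}
```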
diff --git a/src/crypto.rs b/src/crypto.rs
new file mode 100644
index 0000000..f4512fa
--- /dev/null
+++ b/src/crypto.rs
@@ -0,0 +1,862 @@
+//! Cryptographic structures and functions.
+
+use data_encoding::BASE64URL;
+use derp::{self, Der, Tag};
+use ring;
+use ring::digest::{self, SHA256};
+use ring::rand::SystemRandom;
+use ring::signature::{RSAKeyPair, RSASigningState, Ed25519KeyPair, ED25519,
+ RSA_PSS_2048_8192_SHA256, RSA_PSS_2048_8192_SHA512, RSA_PSS_SHA256,
+ RSA_PSS_SHA512};
+use serde::de::{Deserialize, Deserializer, Error as DeserializeError};
+use serde::ser::{Serialize, Serializer, Error as SerializeError};
+use std::collections::HashMap;
+use std::fmt::{self, Debug, Display};
+use std::str::FromStr;
+use std::sync::Arc;
+use untrusted::Input;
+
+use Result;
+use error::Error;
+use shims;
+
+const HASH_ALG_PREFS: &'static [HashAlgorithm] = &[HashAlgorithm::Sha512, HashAlgorithm::Sha256];
+
+/// 1.2.840.113549.1.1.1 rsaEncryption(PKCS #1)
+const RSA_SPKI_OID: &'static [u8] = &[0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01];
+
+/// 1.3.101.112 curveEd25519(EdDSA 25519 signature algorithm)
+const ED25519_SPKI_OID: &'static [u8] = &[0x2b, 0x65, 0x70];
+
+/// Given a map of hash algorithms and their values, get the preferred algorithm and the hash
+/// calculated by it. Returns an `Err` if there is no match.
+///
+/// ```
+/// use std::collections::HashMap;
+/// use tuf::crypto::{hash_preference, HashValue, HashAlgorithm};
+///
+/// let mut map = HashMap::new();
+/// assert!(hash_preference(&map).is_err());
+///
+/// let _ = map.insert(HashAlgorithm::Sha512, HashValue::new(vec![0x00, 0x01]));
+/// assert_eq!(hash_preference(&map).unwrap().0, &HashAlgorithm::Sha512);
+///
+/// let _ = map.insert(HashAlgorithm::Sha256, HashValue::new(vec![0x02, 0x03]));
+/// assert_eq!(hash_preference(&map).unwrap().0, &HashAlgorithm::Sha512);
+/// ```
+pub fn hash_preference<'a>(
+ hashes: &'a HashMap<HashAlgorithm, HashValue>,
+) -> Result<(&'static HashAlgorithm, &'a HashValue)> {
+ for alg in HASH_ALG_PREFS {
+ match hashes.get(alg) {
+ Some(v) => return Ok((alg, v)),
+ None => continue,
+ }
+ }
+ Err(Error::NoSupportedHashAlgorithm)
+}
+
+/// Calculate the given key's ID.
+///
+/// A `KeyId` is calculated as `sha256(public_key_bytes)`. The TUF spec says that it should be
+/// `sha256(cjson(encoded(public_key_bytes)))`, but this is meaningless once the spec moves away
+/// from using only JSON as the data interchange format.
+fn calculate_key_id(public_key: &[u8]) -> KeyId {
+ let mut context = digest::Context::new(&SHA256);
+ context.update(&public_key);
+ KeyId(context.finish().as_ref().to_vec())
+}
+
+/// Wrapper type for public key's ID.
+///
+/// # Calculating
+/// In order to future-proof the calculation of key IDs and preserve them across encoding types,
+/// a key's ID is calculated as the SHA-256 hash of the DER bytes of a key in Subject Public Key
+/// Info (SPKI) format.
+///
+/// ```bash
+/// SEQUENCE {
+/// SEQUENCE {
+/// OBJECT IDENTIFIER
+/// NULL
+/// }
+/// BIT STRING
+/// }
+/// ```
+///
+/// Where `BIT STRING` encapsulates the actual public key. In the case of RSA this is:
+///
+/// ```bash
+/// SEQUENCE {
+/// INTEGER (n, modulus)
+/// INTEGER (e, exponent)
+/// }
+/// ```
+#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct KeyId(Vec<u8>);
+
+impl KeyId {
+ /// Parse a key ID from a base64url string.
+ pub fn from_string(string: &str) -> Result<Self> {
+ if string.len() != 44 {
+ return Err(Error::IllegalArgument(
+ "Base64 key ID must be 44 characters long".into(),
+ ));
+ }
+ Ok(KeyId(BASE64URL.decode(string.as_bytes())?))
+ }
+}
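Editor's note: the key-ID scheme documented above is easy to reproduce outside this module — SHA-256 over the SPKI DER bytes, then base64url. A sketch using the same `ring` and `data-encoding` calls as the private `calculate_key_id`; the helper name is ours:

```rust
use data_encoding::BASE64URL;
use ring::digest::{self, SHA256};

/// Mirror of the private `calculate_key_id`: SHA-256 over SPKI DER bytes,
/// rendered in the base64url form that `KeyId::from_string` expects.
fn key_id_string(spki_der: &[u8]) -> String {
    let mut context = digest::Context::new(&SHA256);
    context.update(spki_der);
    BASE64URL.encode(context.finish().as_ref())
}
```

A 32-byte SHA-256 digest always base64url-encodes to 44 characters, which is exactly the length `KeyId::from_string` checks for.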
+
+impl Debug for KeyId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "KeyId {{ \"{}\" }}", BASE64URL.encode(&self.0))
+ }
+}
+
+impl Serialize for KeyId {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ BASE64URL.encode(&self.0).serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for KeyId {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let string: String = Deserialize::deserialize(de)?;
+ KeyId::from_string(&string).map_err(|e| DeserializeError::custom(format!("{:?}", e)))
+ }
+}
+
+/// Cryptographic signature schemes.
+#[derive(Debug, Clone, PartialEq)]
+pub enum SignatureScheme {
+ /// [Ed25519](https://ed25519.cr.yp.to/)
+ Ed25519,
+ /// [RSASSA-PSS](https://tools.ietf.org/html/rfc5756) calculated over SHA256
+ RsaSsaPssSha256,
+ /// [RSASSA-PSS](https://tools.ietf.org/html/rfc5756) calculated over SHA512
+ RsaSsaPssSha512,
+}
+
+impl ToString for SignatureScheme {
+ fn to_string(&self) -> String {
+ match self {
+ &SignatureScheme::Ed25519 => "ed25519",
+ &SignatureScheme::RsaSsaPssSha256 => "rsassa-pss-sha256",
+ &SignatureScheme::RsaSsaPssSha512 => "rsassa-pss-sha512",
+ }.to_string()
+ }
+}
+
+impl FromStr for SignatureScheme {
+ type Err = Error;
+
+ fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> {
+ match s {
+ "ed25519" => Ok(SignatureScheme::Ed25519),
+ "rsassa-pss-sha256" => Ok(SignatureScheme::RsaSsaPssSha256),
+ "rsassa-pss-sha512" => Ok(SignatureScheme::RsaSsaPssSha512),
+ typ => Err(Error::Encoding(typ.into())),
+ }
+ }
+}
+
+impl Serialize for SignatureScheme {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ ser.serialize_str(&self.to_string())
+ }
+}
+
+impl<'de> Deserialize<'de> for SignatureScheme {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let string: String = Deserialize::deserialize(de)?;
+ string.parse().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
+ }
+}
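Editor's note: since these serde impls delegate to `FromStr`/`ToString`, the wire names can be exercised directly; a small sketch:

```rust
use tuf::crypto::SignatureScheme;

let scheme: SignatureScheme = "rsassa-pss-sha256".parse().unwrap();
assert_eq!(scheme, SignatureScheme::RsaSsaPssSha256);
assert_eq!(scheme.to_string(), "rsassa-pss-sha256");
assert!("hmac-sha1".parse::<SignatureScheme>().is_err());
```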
+
+/// Wrapper type for the value of a cryptographic signature.
+#[derive(PartialEq)]
+pub struct SignatureValue(Vec<u8>);
+
+impl SignatureValue {
+ /// Create a new `SignatureValue` from the given bytes.
+ pub fn new(bytes: Vec<u8>) -> Self {
+ SignatureValue(bytes)
+ }
+
+ /// Create a new `SignatureValue` from the given base64url string.
+ pub fn from_string(string: &str) -> Result<Self> {
+ Ok(SignatureValue(BASE64URL.decode(string.as_bytes())?))
+ }
+}
+
+impl Debug for SignatureValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "SignatureValue {{ \"{}\" }}", BASE64URL.encode(&self.0))
+ }
+}
+
+impl Serialize for SignatureValue {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ BASE64URL.encode(&self.0).serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for SignatureValue {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let string: String = Deserialize::deserialize(de)?;
+ SignatureValue::from_string(&string).map_err(|e| {
+ DeserializeError::custom(format!("Signature value was not valid base64url: {:?}", e))
+ })
+ }
+}
+
+/// Types of public keys.
+#[derive(Clone, PartialEq, Debug)]
+pub enum KeyType {
+ /// [Ed25519](https://ed25519.cr.yp.to/)
+ Ed25519,
+ /// [RSA](https://en.wikipedia.org/wiki/RSA_%28cryptosystem%29)
+ Rsa,
+}
+
+impl KeyType {
+ fn from_oid(oid: &[u8]) -> Result<Self> {
+ match oid {
+ x if x == RSA_SPKI_OID => Ok(KeyType::Rsa),
+ x if x == ED25519_SPKI_OID => Ok(KeyType::Ed25519),
+ x => Err(Error::Encoding(format!(
+ "Unknown OID:{}",
+ x.iter().map(|b| format!("{:x}", b)).collect::<String>()
+ ))),
+ }
+ }
+
+ fn as_oid(&self) -> &'static [u8] {
+ match self {
+ &KeyType::Rsa => RSA_SPKI_OID,
+ &KeyType::Ed25519 => ED25519_SPKI_OID,
+ }
+ }
+}
+
+impl FromStr for KeyType {
+ type Err = Error;
+
+ fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> {
+ match s {
+ "ed25519" => Ok(KeyType::Ed25519),
+ "rsa" => Ok(KeyType::Rsa),
+ typ => Err(Error::Encoding(typ.into())),
+ }
+ }
+}
+
+impl ToString for KeyType {
+ fn to_string(&self) -> String {
+ match self {
+ &KeyType::Ed25519 => "ed25519",
+ &KeyType::Rsa => "rsa",
+ }.to_string()
+ }
+}
+
+impl Serialize for KeyType {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ ser.serialize_str(&self.to_string())
+ }
+}
+
+impl<'de> Deserialize<'de> for KeyType {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let string: String = Deserialize::deserialize(de)?;
+ string.parse().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
+ }
+}
+
+enum PrivateKeyType {
+ Ed25519(Ed25519KeyPair),
+ Rsa(Arc<RSAKeyPair>),
+}
+
+impl Debug for PrivateKeyType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let s = match self {
+ &PrivateKeyType::Ed25519(_) => "ed25519",
+ &PrivateKeyType::Rsa(_) => "rsa",
+ };
+ write!(f, "PrivateKeyType {{ \"{}\" }}", s)
+ }
+}
+
+/// A structure containing information about a private key.
+pub struct PrivateKey {
+ private: PrivateKeyType,
+ public: PublicKey,
+}
+
+impl PrivateKey {
+ /// Create a private key from PKCS#8v2 DER bytes.
+ ///
+ /// # Generating Keys
+ ///
+ /// ## Ed25519
+ ///
+ /// ```bash
+ /// $ touch ed25519-private-key.pk8
+ /// $ chmod 0600 ed25519-private-key.pk8
+ /// ```
+ ///
+ /// ```no_run
+ /// extern crate ring;
+ /// use ring::rand::SystemRandom;
+ /// use ring::signature::Ed25519KeyPair;
+ /// use std::fs::File;
+ /// use std::io::Write;
+ ///
+ /// fn main() {
+ /// let mut file = File::open("ed25519-private-key.pk8").unwrap();
+ /// let key = Ed25519KeyPair::generate_pkcs8(&SystemRandom::new()).unwrap();
+ /// file.write_all(&key).unwrap()
+ /// }
+ /// ```
+ ///
+ /// ## RSA
+ ///
+ /// ```bash
+ /// $ umask 077
+ /// $ openssl genpkey -algorithm RSA \
+ /// -pkeyopt rsa_keygen_bits:4096 \
+ /// -pkeyopt rsa_keygen_pubexp:65537 | \
+ /// openssl pkcs8 -topk8 -nocrypt -outform der > rsa-4096-private-key.pk8
+ /// ```
+ pub fn from_pkcs8(der_key: &[u8]) -> Result<Self> {
+ match Self::ed25519_from_pkcs8(der_key) {
+ Ok(k) => Ok(k),
+ Err(e1) => {
+ match Self::rsa_from_pkcs8(der_key) {
+ Ok(k) => Ok(k),
+ Err(e2) => Err(Error::Opaque(format!(
+ "Key was neither Ed25519 nor RSA: {:?} {:?}",
+ e1,
+ e2
+ ))),
+ }
+ }
+ }
+ }
+
+ fn ed25519_from_pkcs8(der_key: &[u8]) -> Result<Self> {
+ let key = Ed25519KeyPair::from_pkcs8(Input::from(der_key)).map_err(
+ |_| {
+ Error::Encoding("Could not parse key as PKCS#8v2".into())
+ },
+ )?;
+
+ let public = PublicKey {
+ typ: KeyType::Ed25519,
+ key_id: calculate_key_id(&write_spki(key.public_key_bytes(), &KeyType::Ed25519)?),
+ value: PublicKeyValue(key.public_key_bytes().to_vec()),
+ };
+ let private = PrivateKeyType::Ed25519(key);
+
+ Ok(PrivateKey {
+ private: private,
+ public: public,
+ })
+ }
+
+ fn rsa_from_pkcs8(der_key: &[u8]) -> Result<Self> {
+ let key = RSAKeyPair::from_pkcs8(Input::from(der_key)).map_err(|_| {
+ Error::Encoding("Could not parse key as PKCS#8v2".into())
+ })?;
+
+ if key.public_modulus_len() < 256 {
+ return Err(Error::IllegalArgument(format!(
+ "RSA public modulus must be 2048 or greater. Found {}",
+ key.public_modulus_len() * 8
+ )));
+ }
+
+ let pub_key = extract_rsa_pub_from_pkcs8(der_key)?;
+
+ let public = PublicKey {
+ typ: KeyType::Rsa,
+ key_id: calculate_key_id(&write_spki(&pub_key, &KeyType::Rsa)?),
+ value: PublicKeyValue(pub_key),
+ };
+ let private = PrivateKeyType::Rsa(Arc::new(key));
+
+ Ok(PrivateKey {
+ private: private,
+ public: public,
+ })
+ }
+
+ /// Return whether or not this key supports the given signature scheme.
+ pub fn supports(&self, scheme: &SignatureScheme) -> bool {
+ match (&self.private, scheme) {
+ (&PrivateKeyType::Rsa(_), &SignatureScheme::RsaSsaPssSha256) => true,
+ (&PrivateKeyType::Rsa(_), &SignatureScheme::RsaSsaPssSha512) => true,
+ (&PrivateKeyType::Ed25519(_), &SignatureScheme::Ed25519) => true,
+ _ => false,
+ }
+ }
+
+ /// Sign a message with the given scheme.
+ pub fn sign(&self, msg: &[u8], scheme: SignatureScheme) -> Result<Signature> {
+ let value = match (&self.private, &scheme) {
+ (&PrivateKeyType::Rsa(ref rsa), &SignatureScheme::RsaSsaPssSha256) => {
+ let mut signing_state = RSASigningState::new(rsa.clone()).map_err(|_| {
+ Error::Opaque("Could not initialize RSA signing state.".into())
+ })?;
+ let rng = SystemRandom::new();
+ let mut buf = vec![0; signing_state.key_pair().public_modulus_len()];
+ signing_state
+ .sign(&RSA_PSS_SHA256, &rng, msg, &mut buf)
+ .map_err(|_| Error::Opaque("Failed to sign message.".into()))?;
+ SignatureValue(buf)
+ }
+ (&PrivateKeyType::Rsa(ref rsa), &SignatureScheme::RsaSsaPssSha512) => {
+ let mut signing_state = RSASigningState::new(rsa.clone()).map_err(|_| {
+ Error::Opaque("Could not initialize RSA signing state.".into())
+ })?;
+ let rng = SystemRandom::new();
+ let mut buf = vec![0; signing_state.key_pair().public_modulus_len()];
+ signing_state
+ .sign(&RSA_PSS_SHA512, &rng, msg, &mut buf)
+ .map_err(|_| Error::Opaque("Failed to sign message.".into()))?;
+ SignatureValue(buf)
+ }
+ (&PrivateKeyType::Ed25519(ref ed), &SignatureScheme::Ed25519) => {
+ SignatureValue(ed.sign(msg).as_ref().into())
+ }
+ (k, s) => {
+ return Err(Error::IllegalArgument(
+ format!("Key {:?} can't be used with scheme {:?}", k, s),
+ ))
+ }
+ };
+
+ Ok(Signature {
+ key_id: self.key_id().clone(),
+ scheme: scheme,
+ value: value,
+ })
+ }
+
+ /// Return the public component of the key.
+ pub fn public(&self) -> &PublicKey {
+ &self.public
+ }
+
+ /// Return the key ID of the public key.
+ pub fn key_id(&self) -> &KeyId {
+ &self.public.key_id
+ }
+}
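Editor's note: end to end, the `PrivateKey` API above loads a PKCS#8 key, signs, and verifies through its public half; a sketch, where the file path is illustrative:

```rust
use std::fs::File;
use std::io::Read;
use tuf::Result;
use tuf::crypto::{PrivateKey, SignatureScheme};

fn sign_and_verify() -> Result<()> {
    // Illustrative path; see the doc comment above for generating this file.
    let mut der = Vec::new();
    File::open("ed25519-private-key.pk8")?.read_to_end(&mut der)?;

    let key = PrivateKey::from_pkcs8(&der)?;
    assert!(key.supports(&SignatureScheme::Ed25519));

    let sig = key.sign(b"hello world", SignatureScheme::Ed25519)?;
    key.public().verify(b"hello world", &sig)
}
```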
+
+
+/// A structure containing information about a public key.
+#[derive(Clone, Debug, PartialEq)]
+pub struct PublicKey {
+ typ: KeyType,
+ key_id: KeyId,
+ value: PublicKeyValue,
+}
+
+impl PublicKey {
+ /// Parse DER bytes as an SPKI key.
+ pub fn from_spki(der_bytes: &[u8]) -> Result<Self> {
+ let input = Input::from(der_bytes);
+ let (typ, value) = input.read_all(derp::Error::Read, |input| {
+ derp::nested(input, Tag::Sequence, |input| {
+ let typ = derp::nested(input, Tag::Sequence, |input| {
+ let typ = derp::expect_tag_and_get_value(input, Tag::Oid)?;
+ let typ = KeyType::from_oid(typ.as_slice_less_safe()).map_err(|_| {
+ derp::Error::WrongValue
+ })?;
+ let _ = derp::read_null(input)?;
+ Ok(typ)
+ })?;
+ let value = derp::bit_string_with_no_unused_bits(input)?;
+ Ok((typ, value.as_slice_less_safe().to_vec()))
+ })
+ })?;
+ let key_id = calculate_key_id(der_bytes);
+ Ok(PublicKey {
+ typ: typ,
+ key_id: key_id,
+ value: PublicKeyValue(value),
+ })
+ }
+
+ /// Write the public key as SPKI DER bytes.
+ pub fn as_spki(&self) -> Result<Vec<u8>> {
+ Ok(write_spki(&self.value.0, &self.typ)?)
+ }
+
+ /// An immutable reference to the key's type.
+ pub fn typ(&self) -> &KeyType {
+ &self.typ
+ }
+
+ /// An immutable reference to the key's ID.
+ pub fn key_id(&self) -> &KeyId {
+ &self.key_id
+ }
+
+ /// Use this key to verify a message with a signature.
+ pub fn verify(&self, msg: &[u8], sig: &Signature) -> Result<()> {
+ let alg: &ring::signature::VerificationAlgorithm = match sig.scheme() {
+ &SignatureScheme::Ed25519 => &ED25519,
+ &SignatureScheme::RsaSsaPssSha256 => &RSA_PSS_2048_8192_SHA256,
+ &SignatureScheme::RsaSsaPssSha512 => &RSA_PSS_2048_8192_SHA512,
+ };
+
+ ring::signature::verify(
+ alg,
+ Input::from(&self.value.0),
+ Input::from(msg),
+ Input::from(&sig.value.0),
+ ).map_err(|_| Error::BadSignature)
+ }
+}
+
+impl Serialize for PublicKey {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let bytes = self.as_spki().map_err(|e| {
+ SerializeError::custom(format!("Couldn't write key as SPKI: {:?}", e))
+ })?;
+ shims::PublicKey::new(self.typ.clone(), &bytes).serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for PublicKey {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::PublicKey = Deserialize::deserialize(de)?;
+ let bytes = BASE64URL
+ .decode(intermediate.public_key().as_bytes())
+ .map_err(|e| DeserializeError::custom(format!("{:?}", e)))?;
+
+ // TODO check typ == type in key
+
+ PublicKey::from_spki(&bytes).map_err(|e| {
+ DeserializeError::custom(format!("Couldn't parse key as SPKI: {:?}", e))
+ })
+ }
+}
+
+#[derive(Clone, PartialEq)]
+struct PublicKeyValue(Vec<u8>);
+
+impl Debug for PublicKeyValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "PublicKeyValue {{ \"{}\" }}", BASE64URL.encode(&self.0))
+ }
+}
+
+/// A structure that contains a `Signature` and associated data for verifying it.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct Signature {
+ key_id: KeyId,
+ scheme: SignatureScheme,
+ value: SignatureValue,
+}
+
+impl Signature {
+ /// An immutable reference to the `KeyId` that produced the signature.
+ pub fn key_id(&self) -> &KeyId {
+ &self.key_id
+ }
+
+ /// An immutable reference to the `SignatureScheme` used to create this signature.
+ pub fn scheme(&self) -> &SignatureScheme {
+ &self.scheme
+ }
+
+ /// An immutable reference to the `SignatureValue`.
+ pub fn value(&self) -> &SignatureValue {
+ &self.value
+ }
+}
+
+/// The available hash algorithms.
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+pub enum HashAlgorithm {
+ /// SHA256 as described in [RFC-6234](https://tools.ietf.org/html/rfc6234)
+ #[serde(rename = "sha256")]
+ Sha256,
+ /// SHA512 as described in [RFC-6234](https://tools.ietf.org/html/rfc6234)
+ #[serde(rename = "sha512")]
+ Sha512,
+}
+
+/// Wrapper for the value of a hash digest.
+#[derive(Clone, Eq, PartialEq, Hash)]
+pub struct HashValue(Vec<u8>);
+
+impl HashValue {
+ /// Create a new `HashValue` from the given digest bytes.
+ pub fn new(bytes: Vec<u8>) -> Self {
+ HashValue(bytes)
+ }
+
+ /// An immutable reference to the bytes of the hash value.
+ pub fn value(&self) -> &[u8] {
+ &self.0
+ }
+}
+
+impl Serialize for HashValue {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ BASE64URL.encode(&self.0).serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for HashValue {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let s: String = Deserialize::deserialize(de)?;
+ let bytes = BASE64URL.decode(s.as_bytes()).map_err(|e| {
+ DeserializeError::custom(format!("Base64: {:?}", e))
+ })?;
+ Ok(HashValue(bytes))
+ }
+}
+
+impl Debug for HashValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "HashValue {{ \"{}\" }}", BASE64URL.encode(&self.0))
+ }
+}
+
+impl Display for HashValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", BASE64URL.encode(&self.0))
+ }
+}
+
+fn write_spki(public: &[u8], key_type: &KeyType) -> ::std::result::Result<Vec<u8>, derp::Error> {
+ let mut output = Vec::new();
+ {
+ let mut der = Der::new(&mut output);
+ der.write_sequence(|der| {
+ der.write_sequence(|der| {
+ der.write_element(Tag::Oid, key_type.as_oid())?;
+ der.write_null()
+ })?;
+ der.write_bit_string(0, |der| der.write_raw(public))
+ })?;
+ }
+
+ Ok(output)
+}
+
+fn extract_rsa_pub_from_pkcs8(der_key: &[u8]) -> ::std::result::Result<Vec<u8>, derp::Error> {
+ let input = Input::from(der_key);
+ input.read_all(derp::Error::Read, |input| {
+ derp::nested(input, Tag::Sequence, |input| {
+ if derp::small_nonnegative_integer(input)? != 0 {
+ return Err(derp::Error::WrongValue);
+ }
+
+ derp::nested(input, Tag::Sequence, |input| {
+ let actual_alg_id = derp::expect_tag_and_get_value(input, Tag::Oid)?;
+ if actual_alg_id.as_slice_less_safe() != RSA_SPKI_OID {
+ return Err(derp::Error::WrongValue);
+ }
+ let _ = derp::expect_tag_and_get_value(input, Tag::Null)?;
+ Ok(())
+ })?;
+
+ derp::nested(input, Tag::OctetString, |input| {
+ derp::nested(input, Tag::Sequence, |input| {
+ if derp::small_nonnegative_integer(input)? != 0 {
+ return Err(derp::Error::WrongValue);
+ }
+
+ let n = derp::positive_integer(input)?;
+ let e = derp::positive_integer(input)?;
+ let _ = input.skip_to_end();
+ write_pkcs1(n.as_slice_less_safe(), e.as_slice_less_safe())
+ })
+ })
+ })
+ })
+}
+
+fn write_pkcs1(n: &[u8], e: &[u8]) -> ::std::result::Result<Vec<u8>, derp::Error> {
+ let mut output = Vec::new();
+ {
+ let mut der = Der::new(&mut output);
+ der.write_sequence(|der| {
+ der.write_positive_integer(n)?;
+ der.write_positive_integer(e)
+ })?;
+ }
+
+ Ok(output)
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use json;
+
+ const RSA_2048_PK8: &'static [u8] = include_bytes!("../tests/rsa/rsa-2048.pk8.der");
+ const RSA_2048_SPKI: &'static [u8] = include_bytes!("../tests/rsa/rsa-2048.spki.der");
+ const RSA_2048_PKCS1: &'static [u8] = include_bytes!("../tests/rsa/rsa-2048.pkcs1.der");
+
+ const RSA_4096_PK8: &'static [u8] = include_bytes!("../tests/rsa/rsa-4096.pk8.der");
+ const RSA_4096_SPKI: &'static [u8] = include_bytes!("../tests/rsa/rsa-4096.spki.der");
+ const RSA_4096_PKCS1: &'static [u8] = include_bytes!("../tests/rsa/rsa-4096.pkcs1.der");
+
+ const ED25519_PK8: &'static [u8] = include_bytes!("../tests/ed25519/ed25519-1.pk8.der");
+
+ #[test]
+ fn parse_rsa_2048_spki() {
+ let key = PublicKey::from_spki(RSA_2048_SPKI).expect("parse spki");
+ assert_eq!(key.typ, KeyType::Rsa);
+ }
+
+ #[test]
+ fn parse_rsa_4096_spki() {
+ let key = PublicKey::from_spki(RSA_4096_SPKI).expect("parse spki");
+ assert_eq!(key.typ, KeyType::Rsa);
+ }
+
+ #[test]
+ fn rsa_2048_read_pkcs8_and_sign() {
+ let key = PrivateKey::from_pkcs8(RSA_2048_PK8).expect("parse pkcs8");
+ let msg = b"test";
+
+ let sig = key.sign(msg, SignatureScheme::RsaSsaPssSha256).expect(
+ "sign msg",
+ );
+ key.public.verify(msg, &sig).expect("verify msg");
+
+ let sig = key.sign(msg, SignatureScheme::RsaSsaPssSha512).expect(
+ "sign msg",
+ );
+ key.public.verify(msg, &sig).expect("verify msg");
+
+ assert!(key.sign(msg, SignatureScheme::Ed25519).is_err());
+ }
+
+ #[test]
+ fn rsa_4096_read_pkcs8_and_sign() {
+ let key = PrivateKey::from_pkcs8(RSA_4096_PK8).expect("parse pkcs8");
+ let msg = b"test";
+
+ let sig = key.sign(msg, SignatureScheme::RsaSsaPssSha256).expect(
+ "sign msg",
+ );
+ key.public.verify(msg, &sig).expect("verify msg");
+
+ let sig = key.sign(msg, SignatureScheme::RsaSsaPssSha512).expect(
+ "sign msg",
+ );
+ key.public.verify(msg, &sig).expect("verify msg");
+
+ assert!(key.sign(msg, SignatureScheme::Ed25519).is_err());
+ }
+
+ #[test]
+ fn extract_pkcs1_from_rsa_2048_pkcs8() {
+ let res = extract_rsa_pub_from_pkcs8(RSA_2048_PK8).expect("parse pkcs8");
+ assert_eq!(res.as_slice(), RSA_2048_PKCS1);
+ }
+
+ #[test]
+ fn extract_pkcs1_from_rsa_4096_pkcs8() {
+ let res = extract_rsa_pub_from_pkcs8(RSA_4096_PK8).expect("parse pkcs8");
+ assert_eq!(res.as_slice(), RSA_4096_PKCS1);
+ }
+
+ #[test]
+ fn ed25519_read_pkcs8_and_sign() {
+ let key = PrivateKey::from_pkcs8(ED25519_PK8).unwrap();
+ let msg = b"test";
+
+ let sig = key.sign(msg, SignatureScheme::Ed25519).unwrap();
+
+ let public = PublicKey::from_spki(&key.public.as_spki().unwrap()).unwrap();
+ public.verify(msg, &sig).unwrap();
+
+ assert!(key.sign(msg, SignatureScheme::RsaSsaPssSha256).is_err());
+ assert!(key.sign(msg, SignatureScheme::RsaSsaPssSha512).is_err());
+ }
+
+ #[test]
+ fn serde_key_id() {
+ let s = "T5vfRrM1iHpgzGwAHe7MbJH_7r4chkOAphV3OPCCv0I=";
+ let jsn = json!(s);
+ let parsed: KeyId = json::from_str(&format!("\"{}\"", s)).unwrap();
+ assert_eq!(parsed, KeyId::from_string(s).unwrap());
+ let encoded = json::to_value(&parsed).unwrap();
+ assert_eq!(encoded, jsn);
+ }
+
+ #[test]
+ fn serde_signature_value() {
+ let s = "T5vfRrM1iHpgzGwAHe7MbJH_7r4chkOAphV3OPCCv0I=";
+ let jsn = json!(s);
+ let parsed: SignatureValue = json::from_str(&format!("\"{}\"", s)).unwrap();
+ assert_eq!(parsed, SignatureValue::from_string(s).unwrap());
+ let encoded = json::to_value(&parsed).unwrap();
+ assert_eq!(encoded, jsn);
+ }
+
+ #[test]
+ fn serde_rsa_public_key() {
+ let der = RSA_2048_SPKI;
+ let pub_key = PublicKey::from_spki(der).unwrap();
+ let encoded = json::to_value(&pub_key).unwrap();
+ let jsn = json!({
+ "type": "rsa",
+ "public_key": BASE64URL.encode(der),
+ });
+ assert_eq!(encoded, jsn);
+ let decoded: PublicKey = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, pub_key);
+ }
+
+ #[test]
+ fn serde_signature() {
+ let key = PrivateKey::from_pkcs8(ED25519_PK8).unwrap();
+ let msg = b"test";
+ let sig = key.sign(msg, SignatureScheme::Ed25519).unwrap();
+ let encoded = json::to_value(&sig).unwrap();
+ let jsn = json!({
+ "key_id": "qfrfBrkB4lBBSDEBlZgaTGS_SrE6UfmON9kP4i3dJFY=",
+ "scheme": "ed25519",
+ "value": "_k0Tsqc8Azod5_UQeyBfx7oOFWbLlbkjScrmqkU4lWATv-D3v5d8sHK7Z\
+ eh4K18zoFc_54gWKZoBfKW6VZ45DA==",
+ });
+ assert_eq!(encoded, jsn);
+
+ let decoded: Signature = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, sig);
+ }
+}
diff --git a/src/error.rs b/src/error.rs
index f2d0518..77009fd 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,82 +1,89 @@
+//! Error types and converters.
+
+use data_encoding::DecodeError;
+use derp;
use hyper;
use json;
-use std::path::Path;
use std::io;
+use std::path::Path;
+use tempfile;
use metadata::Role;
/// Error type for all TUF related errors.
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
- /// ASN.1 parse errors.
- Asn1,
- /// Errors for converting JSON to canonical JSON.
- CanonicalJsonError(String),
- /// The metadata for the given role has expired.
+ /// The metadata had a bad signature.
+ BadSignature,
+ /// There was a problem encoding or decoding.
+ Encoding(String),
+ /// Metadata was expired.
ExpiredMetadata(Role),
- /// Generic error type for more opaque error reporting.
- Generic(String),
- /// An HTTP or network error.
- Http(String),
- /// The TUF configuration was invalid.
- InvalidConfig(String),
- /// Wrapper for IO errors.
- Io(String),
- /// There was an error parsing JSON.
- Json(String),
- /// The calculated and provided hashes for the matadata did not match.
- MetadataHashMismatch(Role),
- /// A necessary piece of metadata was missing.
+ /// An illegal argument was passed into a function.
+ IllegalArgument(String),
+ /// The metadata was missing, so an operation could not be completed.
MissingMetadata(Role),
- /// The signed metadata had duplicate signatures from a particular key.
- NonUniqueSignatures(Role),
- /// The metadata did not provide any hash algorithms that this library can calculate.
- NoSupportedHashAlgorithms,
- /// A piece of metadata exceeded the provided or maximum allowed size.
- OversizedMetadata(Role),
- /// The calculated and provided hashes for the target did not match.
- UnknownRole(String),
- /// The target does not exist in valid metadata or could not be verified.
- UnavailableTarget,
- /// The role did not have enough signatures to meet the required threshold.
- UnmetThreshold(Role),
- /// The key type was not supported by this library.
- UnsupportedKeyType(String),
- /// The signature scheme was not supported by this library.
- UnsupportedSignatureScheme(String),
- /// There was an error in the verification process.
+ /// There were no available hash algorithms.
+ NoSupportedHashAlgorithm,
+ /// The metadata or target was not found.
+ NotFound,
+ /// Opaque error type, to be interpreted similarly to an HTTP 500. Something went wrong, and you may
+ /// or may not be able to do anything about it.
+ Opaque(String),
+ /// There was an internal library error. These errors are *ALWAYS* bugs and should be reported.
+ Programming(String),
+ /// The target is unavailable. This may mean it is either not in the metadata or the metadata
+ /// chain to the target cannot be fully verified.
+ TargetUnavailable,
+ /// The metadata or target failed to verify.
VerificationFailure(String),
- /// A piece of metadata decreased its version when not allowed.
- VersionDecrease(Role),
+}
+
+impl From<json::error::Error> for Error {
+ fn from(err: json::error::Error) -> Error {
+ Error::Encoding(format!("JSON: {:?}", err))
+ }
}
impl Error {
/// Helper to include the path that caused the error for FS I/O errors.
pub fn from_io(err: io::Error, path: &Path) -> Error {
- Error::Io(format!("Path {:?} : {:?}", path, err))
+ Error::Opaque(format!("Path {:?} : {:?}", path, err))
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
- Error::Io(format!("{:?}", err))
- }
-}
-
-impl From<json::Error> for Error {
- fn from(err: json::Error) -> Error {
- Error::Json(format!("{:?}", err))
+ Error::Opaque(format!("IO: {:?}", err))
}
}
impl From<hyper::error::Error> for Error {
fn from(err: hyper::error::Error) -> Error {
- Error::Http(format!("{:?}", err))
+ Error::Opaque(format!("Hyper: {:?}", err))
}
}
impl From<hyper::error::ParseError> for Error {
fn from(err: hyper::error::ParseError) -> Error {
- Error::Generic(format!("{:?}", err))
+ Error::Opaque(format!("Hyper: {:?}", err))
+ }
+}
+
+impl From<DecodeError> for Error {
+ fn from(err: DecodeError) -> Error {
+ Error::Encoding(format!("{:?}", err))
+ }
+}
+
+impl From<derp::Error> for Error {
+ fn from(err: derp::Error) -> Error {
+ Error::Encoding(format!("DER: {:?}", err))
+ }
+}
+
+impl From<tempfile::PersistError> for Error {
+ fn from(err: tempfile::PersistError) -> Error {
+ Error::Opaque(format!("Error persisting temp file: {:?}", err))
}
}
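Editor's note: with the error surface collapsed into a few variants, the `From` impls above let `?` do the conversions; a brief sketch (module path assumed):

```rust
use std::fs::File;
use std::io::Read;
use tuf::error::Error;

/// `?` lifts `io::Error` into `Error::Opaque` via the `From` impl above.
fn read_metadata(path: &str) -> Result<Vec<u8>, Error> {
    let mut buf = Vec::new();
    File::open(path)?.read_to_end(&mut buf)?;
    Ok(buf)
}
```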
diff --git a/src/http.rs b/src/http.rs
deleted file mode 100644
index 7a02af7..0000000
--- a/src/http.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use hyper::client::Client as HttpClient;
-use hyper::client::response::Response;
-use hyper::header::{Headers, UserAgent};
-use url::Url;
-
-use error::Error;
-use util;
-
-pub fn get(http_client: &HttpClient, url: &Url) -> Result<Response, Error> {
- let mut headers = Headers::new();
- headers.set(UserAgent(format!("rust-tuf/{}", env!("CARGO_PKG_VERSION"))));
- let req = http_client.get(util::url_to_hyper_url(url)?)
- .headers(headers);
- Ok(req.send()?)
-}
diff --git a/src/cjson.rs b/src/interchange/cjson.rs
similarity index 86%
rename from src/cjson.rs
rename to src/interchange/cjson.rs
index b624980..f0baee8 100644
--- a/src/cjson.rs
+++ b/src/interchange/cjson.rs
@@ -15,7 +15,6 @@
Bool(bool),
Null,
Number(Number),
- // TODO this needs to be &[u8] and not String
Object(BTreeMap<String, Value>),
String(String),
}
@@ -27,14 +26,14 @@
&Value::Bool(true) => Ok(buf.extend(b"true")),
&Value::Bool(false) => Ok(buf.extend(b"false")),
&Value::Number(Number::I64(n)) => {
- itoa::write(buf, n)
- .map(|_| ())
- .map_err(|err| format!("Write error: {}", err))
+ itoa::write(buf, n).map(|_| ()).map_err(|err| {
+ format!("Write error: {}", err)
+ })
}
&Value::Number(Number::U64(n)) => {
- itoa::write(buf, n)
- .map(|_| ())
- .map_err(|err| format!("Write error: {}", err))
+ itoa::write(buf, n).map(|_| ()).map_err(|err| {
+ format!("Write error: {}", err)
+ })
}
&Value::String(ref s) => {
escape_str(&mut buf, &s).map_err(|err| format!("Write error: {}", err))
@@ -59,7 +58,9 @@
buf.push(b',');
}
first = false;
- escape_str(&mut buf, &k).map_err(|err| format!("Write error: {}", err))?;
+ escape_str(&mut buf, &k).map_err(|err| {
+ format!("Write error: {}", err)
+ })?;
buf.push(b':');
v.write(&mut buf)?;
}
@@ -105,7 +106,8 @@
/// Serializes and escapes a `&str` into a JSON string.
fn escape_str<W>(wr: &mut W, value: &str) -> Result<(), io::Error>
- where W: io::Write
+where
+ W: io::Write,
{
let bytes = value.as_bytes();
@@ -179,9 +181,11 @@
#[test]
fn write_arr() {
- let jsn = Value::Array(vec![Value::String(String::from("wat")),
- Value::String(String::from("lol")),
- Value::String(String::from("no"))]);
+ let jsn = Value::Array(vec![
+ Value::String(String::from("wat")),
+ Value::String(String::from("lol")),
+ Value::String(String::from("no")),
+ ]);
let mut out = Vec::new();
jsn.write(&mut out).expect("write failed");
assert_eq!(&out, b"[\"wat\",\"lol\",\"no\"]");
@@ -190,8 +194,10 @@
#[test]
fn write_obj() {
let mut map = BTreeMap::new();
- let arr = Value::Array(vec![Value::String(String::from("haha")),
- Value::String(String::from("omg so tired"))]);
+ let arr = Value::Array(vec![
+ Value::String(String::from("haha")),
+ Value::String(String::from("omg so tired")),
+ ]);
let _ = map.insert(String::from("lol"), arr);
let jsn = Value::Object(map);
let mut out = Vec::new();
@@ -203,11 +209,15 @@
fn root_json() {
let mut file = File::open("./tests/cjson/root.json").expect("couldn't open root.json");
let mut buf = String::new();
- file.read_to_string(&mut buf).expect("couldn't read root.json");
+ file.read_to_string(&mut buf).expect(
+ "couldn't read root.json",
+ );
let mut file = File::open("./tests/cjson/root.cjson").expect("couldn't open root.cjson");
let mut cjsn = String::new();
- file.read_to_string(&mut cjsn).expect("couldn't read root.cjson");
+ file.read_to_string(&mut cjsn).expect(
+ "couldn't read root.cjson",
+ );
let ref jsn = json::from_str(&buf).expect("not json");
let out = canonicalize(jsn).expect("couldn't canonicalize");
diff --git a/src/interchange/mod.rs b/src/interchange/mod.rs
new file mode 100644
index 0000000..a43eafd
--- /dev/null
+++ b/src/interchange/mod.rs
@@ -0,0 +1,166 @@
+//! Structures and functions to aid in various TUF data interchange formats.
+
+mod cjson;
+
+use json;
+use serde::de::DeserializeOwned;
+use serde::ser::Serialize;
+use std::fmt::Debug;
+use std::io::{Read, Write};
+
+use Result;
+use error::Error;
+
+/// The format used for data interchange, serialization, and deserialization.
+pub trait DataInterchange: Debug + PartialEq {
+ /// The type of data that is contained in the `signed` portion of metadata.
+ type RawData: Serialize + DeserializeOwned;
+
+ /// The data interchange's extension.
+ fn extension() -> &'static str;
+
+ /// A function that canonicalizes data to allow for deterministic signatures.
+ fn canonicalize(raw_data: &Self::RawData) -> Result<Vec<u8>>;
+
+ /// Deserialize from `RawData`.
+ fn deserialize<T>(raw_data: &Self::RawData) -> Result<T>
+ where
+ T: DeserializeOwned;
+
+ /// Serialize into `RawData`.
+ fn serialize<T>(data: &T) -> Result<Self::RawData>
+ where
+ T: Serialize;
+
+ /// Write a struct to a stream.
+ fn to_writer<W, T: ?Sized>(writer: W, value: &T) -> Result<()>
+ where
+ W: Write,
+ T: Serialize;
+
+ /// Read a struct from a stream.
+ fn from_reader<R, T>(rdr: R) -> Result<T>
+ where
+ R: Read,
+ T: DeserializeOwned;
+}
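+
+Every method on this trait is an associated function, so downstream code can stay format-agnostic by taking `D: DataInterchange` as a type parameter. A minimal sketch of the pattern (the helper name is illustrative, not part of this crate):
+
+```rust
+use serde::ser::Serialize;
+
+// Hypothetical helper: serialize a value via any interchange format, then
+// canonicalize the result into the deterministic bytes used for signing.
+fn signable_bytes<D, T>(value: &T) -> Result<Vec<u8>>
+where
+    D: DataInterchange,
+    T: Serialize,
+{
+    let raw = D::serialize(value)?;
+    D::canonicalize(&raw)
+}
+```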
+
+/// JSON data interchange.
+#[derive(Debug, PartialEq)]
+pub struct JsonDataInterchange {}
+impl DataInterchange for JsonDataInterchange {
+ type RawData = json::Value;
+
+ /// ```
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ ///
+ /// assert_eq!(JsonDataInterchange::extension(), "json");
+ /// ```
+ fn extension() -> &'static str {
+ "json"
+ }
+
+ /// ```
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ /// use std::collections::HashMap;
+ ///
+ /// let jsn: &[u8] = br#"{"foo": "bar", "baz": "quux"}"#;
+ /// let raw = JsonDataInterchange::from_reader(jsn).unwrap();
+ /// let out = JsonDataInterchange::canonicalize(&raw).unwrap();
+ /// assert_eq!(out, br#"{"baz":"quux","foo":"bar"}"#);
+ /// ```
+ fn canonicalize(raw_data: &Self::RawData) -> Result<Vec<u8>> {
+ cjson::canonicalize(raw_data).map_err(|e| Error::Opaque(e))
+ }
+
+ /// ```
+ /// #[macro_use]
+ /// extern crate serde_derive;
+ /// #[macro_use]
+ /// extern crate serde_json;
+ /// extern crate tuf;
+ ///
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ /// use std::collections::HashMap;
+ ///
+ /// #[derive(Deserialize, Debug, PartialEq)]
+ /// struct Thing {
+ /// foo: String,
+ /// bar: String,
+ /// }
+ ///
+ /// fn main() {
+ /// let jsn = json!({"foo": "wat", "bar": "lol"});
+ /// let thing = Thing { foo: "wat".into(), bar: "lol".into() };
+ /// let de: Thing = JsonDataInterchange::deserialize(&jsn).unwrap();
+ /// assert_eq!(de, thing);
+ /// }
+ /// ```
+ fn deserialize<T>(raw_data: &Self::RawData) -> Result<T>
+ where
+ T: DeserializeOwned,
+ {
+ Ok(json::from_value(raw_data.clone())?)
+ }
+
+ /// ```
+ /// #[macro_use]
+ /// extern crate serde_derive;
+ /// #[macro_use]
+ /// extern crate serde_json;
+ /// extern crate tuf;
+ ///
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ /// use std::collections::HashMap;
+ ///
+ /// #[derive(Serialize)]
+ /// struct Thing {
+ /// foo: String,
+ /// bar: String,
+ /// }
+ ///
+ /// fn main() {
+ /// let jsn = json!({"foo": "wat", "bar": "lol"});
+ /// let thing = Thing { foo: "wat".into(), bar: "lol".into() };
+ /// let se: serde_json::Value = JsonDataInterchange::serialize(&thing).unwrap();
+ /// assert_eq!(se, jsn);
+ /// }
+ /// ```
+ fn serialize<T>(data: &T) -> Result<Self::RawData>
+ where
+ T: Serialize,
+ {
+ Ok(json::to_value(data)?)
+ }
+
+ /// ```
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ ///
+ /// let arr = vec![1, 2, 3];
+ /// let mut buf = Vec::new();
+ /// JsonDataInterchange::to_writer(&mut buf, &arr).unwrap();
+ /// assert!(&buf == b"[1, 2, 3]" || &buf == b"[1,2,3]");
+ /// ```
+ fn to_writer<W, T: ?Sized>(writer: W, value: &T) -> Result<()>
+ where
+ W: Write,
+ T: Serialize,
+ {
+ Ok(json::to_writer(writer, value)?)
+ }
+
+ /// ```
+ /// use tuf::interchange::{DataInterchange, JsonDataInterchange};
+ /// use std::collections::HashMap;
+ ///
+ /// let jsn: &[u8] = br#"{"foo": "bar", "baz": "quux"}"#;
+ /// let _: HashMap<String, String> = JsonDataInterchange::from_reader(jsn).unwrap();
+ /// ```
+ fn from_reader<R, T>(rdr: R) -> Result<T>
+ where
+ R: Read,
+ T: DeserializeOwned,
+ {
+ Ok(json::from_reader(rdr)?)
+ }
+}
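+
+Taken together, the doctests above amount to a round trip; a sketch of the whole path from a value to signable bytes and back, using only the methods defined here:
+
+```rust
+use std::collections::HashMap;
+use tuf::interchange::{DataInterchange, JsonDataInterchange};
+
+fn main() {
+    let mut map = HashMap::new();
+    map.insert("foo".to_string(), "bar".to_string());
+
+    // Serialize into the raw JSON form, then canonicalize. Canonical JSON
+    // sorts object keys, so equal values always yield equal bytes.
+    let raw = JsonDataInterchange::serialize(&map).unwrap();
+    let bytes = JsonDataInterchange::canonicalize(&raw).unwrap();
+    assert_eq!(bytes, br#"{"foo":"bar"}"#);
+
+    // And back out again through the Read-based entry point.
+    let back: HashMap<String, String> =
+        JsonDataInterchange::from_reader(&bytes[..]).unwrap();
+    assert_eq!(back, map);
+}
+```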
diff --git a/src/lib.rs b/src/lib.rs
index 292ee33..0a3d3c9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,5 @@
//! This crate provides an API for talking to repositories that implement The Update Framework
-//! (TUF). Currently only downloading and verification of metadata is possible, not creating new
-//! metadata or storing targets.
+//! (TUF).
//!
//! If you are unfamiliar with TUF, you should read up on it via the [official
//! website](http://theupdateframework.github.io/). This crate aims to implement the entirety of
@@ -8,165 +7,103 @@
//! branch](https://github.com/theupdateframework/tuf/blob/develop/docs/tuf-spec.txt) in the
//! official TUF git repository.
//!
-//! ## Examples
-//!
-//! ### A Standalone Example
+//! # Example
//!
//! ```no_run
+//! extern crate hyper;
//! extern crate tuf;
//! extern crate url;
-//! use tuf::{Tuf, Config, RemoteRepo};
+//!
+//! use hyper::client::Client as HttpClient;
//! use std::path::PathBuf;
+//! use tuf::Tuf;
+//! use tuf::crypto::KeyId;
+//! use tuf::client::{Client, Config};
+//! use tuf::metadata::{RootMetadata, SignedMetadata, Role, MetadataPath,
+//! MetadataVersion};
+//! use tuf::interchange::JsonDataInterchange;
+//! use tuf::repository::{Repository, FileSystemRepository, HttpRepository};
//! use url::Url;
//!
-//! fn main() {
-//! let config = Config::build()
-//! .remote(RemoteRepo::Http(Url::parse("http://localhost:8080/").unwrap()))
-//! .local_path(PathBuf::from("/var/lib/tuf"))
-//! .finish()
-//! .unwrap();
-//! let mut tuf = Tuf::new(config).unwrap();
-//! let path_to_crate = tuf.fetch_target("targets/some_crate/0.1.0/pkg.crate").unwrap();
-//! println!("Crate available at {}", path_to_crate.to_string_lossy());
-//! }
-//!
-//! ```
-//!
-//! The `Tuf` struct is the central piece to using this crate. It handles downloading and verifying
-//! of metadata as well as the storage of metadata and targets.
-//!
-//! ### An Integrated Example
-//!
-//! TUF is designed to be a drop in solution to verifying metadata and targets within an existing
-//! update library.
-//!
-//! Consider the following sample application that
-//!
-//! ```no_run
-//! extern crate url;
-//! use std::path::PathBuf;
-//! use url::Url;
-//!
-//! struct MyUpdater<'a> {
-//! remote_url: Url,
-//! local_cache: PathBuf,
-//! package_list: Vec<&'a str>,
-//! }
-//!
-//! impl<'a> MyUpdater<'a> {
-//! fn new(remote_url: Url, local_cache: PathBuf) -> Self {
-//! MyUpdater {
-//! remote_url: remote_url,
-//! local_cache: local_cache,
-//! package_list: Vec::new(),
-//! }
-//! }
-//!
-//! fn update_lists(&mut self) -> Result<(), String> {
-//! unimplemented!() // idk like some http + fs io probably
-//! }
-//!
-//! fn fetch_package(&self, package: &str) -> Result<PathBuf, String> {
-//! if self.package_list.contains(&package) {
-//! unimplemented!() // moar http + fs io
-//! } else {
-//! return Err("Unknown package".to_string())
-//! }
-//! }
-//! }
+//! static TRUSTED_ROOT_KEY_IDS: &'static [&str] = &[
+//! "diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=",
+//! "ar9AgoRsmeEcf6Ponta_1TZu1ds5uXbDemBig30O7ck=",
+//! "T5vfRrM1iHpgzGwAHe7MbJH_7r4chkOAphV3OPCCv0I=",
+//! ];
//!
//! fn main() {
-//! let url = Url::parse("http://crates.io/").unwrap();
-//! let cache = PathBuf::from("/var/lib/my-updater/");
-//! let mut updater = MyUpdater::new(url, cache);
-//! updater.update_lists().unwrap();
-//! let path_to_crate = updater.fetch_package("some_crate/0.1.0").unwrap();
-//! println!("Crate available at {}", path_to_crate.to_string_lossy());
+//! let key_ids: Vec<KeyId> = TRUSTED_ROOT_KEY_IDS.iter()
+//! .map(|k| KeyId::from_string(k).unwrap())
+//! .collect();
+//!
+//! let mut local = FileSystemRepository::new(PathBuf::from("~/.rustup"));
+//!
+//! let mut remote = HttpRepository::new(
+//! Url::parse("https://static.rust-lang.org/").unwrap(),
+//! HttpClient::new(),
+//! Some("rustup/1.4.0".into()));
+//!
+//! let config = Config::build().finish().unwrap();
+//!
+//! // fetching this original root from the network is safe because
+//! // we are using trusted, pinned keys to verify it
+//! let root = remote.fetch_metadata(&Role::Root,
+//! &MetadataPath::from_role(&Role::Root),
+//! &MetadataVersion::None,
+//! config.max_root_size(),
+//! None).unwrap();
+//!
+//! let tuf = Tuf::<JsonDataInterchange>::from_root_pinned(root, &key_ids).unwrap();
+//!
+//! let mut client = Client::new(tuf, config, local, remote).unwrap();
+//! let _ = client.update_local().unwrap();
+//! let _ = client.update_remote().unwrap();
//! }
-//!
-//! ```
-//!
-//! This simple updater (baring some migration shims), could be altered to use TUF as follows.
-//!
-//! ```no_run
-//! extern crate tuf;
-//! extern crate url;
-//! use std::path::PathBuf;
-//! use tuf::{Tuf, Config, RemoteRepo};
-//! use url::Url;
-//!
-//! struct MyUpdater {
-//! tuf: Tuf,
-//! }
-//!
-//! impl MyUpdater {
-//! fn new(remote_url: Url, local_cache: PathBuf) -> Result<Self, String> {
-//! let config = Config::build()
-//! .remote(RemoteRepo::Http(remote_url))
-//! .local_path(local_cache)
-//! .finish()
-//! .map_err(|e| format!("{:?}", e))?;
-//! let tuf = Tuf::new(config)
-//! .map_err(|e| format!("{:?}", e))?;
-//! Ok(MyUpdater {
-//! tuf: tuf,
-//! })
-//! }
-//!
-//! fn update_lists(&mut self) -> Result<(), String> {
-//! self.tuf.update().map_err(|e| format!("{:?}", e))
-//! }
-//!
-//! fn fetch_package(&self, package: &str) -> Result<PathBuf, String> {
-//! self.tuf.fetch_target(&format!("targets/{:?}/pkg.crate", package))
-//! .map_err(|e| format!("{:?}", e))
-//! }
-//! }
-//!
-//! fn main() {
-//! let url = Url::parse("http://crates.io/").unwrap();
-//! let cache = PathBuf::from("/var/lib/my-updater/");
-//! let mut updater = MyUpdater::new(url, cache).unwrap();
-//! updater.update_lists().unwrap();
-//! let path_to_crate = updater.fetch_package("some_crate/0.1.0").unwrap();
-//! println!("Crate available at {}", path_to_crate.to_string_lossy());
-//! }
-//!
//! ```
#![deny(missing_docs)]
extern crate chrono;
extern crate data_encoding;
+extern crate derp;
extern crate env_logger;
extern crate hyper;
extern crate itoa;
#[macro_use]
extern crate log;
-extern crate pem;
+#[cfg(test)]
+#[macro_use]
+extern crate maplit;
extern crate ring;
extern crate serde;
#[macro_use]
extern crate serde_derive;
+
+#[cfg(not(test))]
extern crate serde_json as json;
+#[cfg(test)]
+#[macro_use]
+extern crate serde_json as json;
+
+#[cfg(test)]
+extern crate tempdir;
+extern crate tempfile;
extern crate url;
extern crate untrusted;
extern crate uuid;
-#[macro_use]
-mod util;
+pub mod error;
-mod cjson;
-mod error;
-mod http;
-mod metadata;
-mod rsa;
-mod tuf;
+/// Alias for `Result<T, Error>`.
+pub type Result<T> = ::std::result::Result<T, Error>;
+
+pub mod client;
+pub mod crypto;
+pub mod interchange;
+pub mod metadata;
+pub mod repository;
+mod shims;
+pub mod tuf;
pub use tuf::*;
pub use error::*;
-
-/// Module containing the various metadata components used by TUF.
-pub mod meta {
- pub use metadata::{Key, KeyValue, KeyType};
-}
diff --git a/src/main.rs b/src/main.rs
deleted file mode 100644
index 7e09c25..0000000
--- a/src/main.rs
+++ /dev/null
@@ -1,207 +0,0 @@
-extern crate clap;
-extern crate env_logger;
-#[macro_use]
-extern crate log;
-#[cfg(test)]
-extern crate tempdir;
-extern crate tuf as _tuf;
-extern crate url;
-
-use clap::{App, AppSettings, SubCommand, Arg, ArgMatches, ArgGroup};
-use std::path::PathBuf;
-use _tuf::{Tuf, Config, Error, RemoteRepo};
-use url::Url;
-
-fn main() {
- let matches = parser().get_matches();
- env_logger::init().unwrap();
-
- match run_main(matches) {
- Ok(()) => std::process::exit(0),
- Err(e) => {
- error!("{:?}", e);
- std::process::exit(1);
- }
- }
-}
-
-fn run_main(matches: ArgMatches) -> Result<(), Error> {
- let remote = matches.value_of("url").map(|u| RemoteRepo::Http(Url::parse(u).unwrap()))
- .or_else(|| matches.value_of("file").map(|p| RemoteRepo::File(PathBuf::from(p))))
- .unwrap();
- let config = Config::build().remote(remote)
- .local_path(PathBuf::from(matches.value_of("path").unwrap()))
- .init(false)
- .finish()?;
-
- if let Some(matches) = matches.subcommand_matches("fetch") {
- let tuf = Tuf::new(config)?;
- cmd_fetch(&tuf, matches.value_of("target").unwrap())
- } else if let Some(_) = matches.subcommand_matches("init") {
- let path = PathBuf::from(matches.value_of("path").unwrap());
- cmd_init(&path)
- } else if let Some(_) = matches.subcommand_matches("update") {
- let mut tuf = Tuf::new(config)?;
- cmd_update(&mut tuf)
- } else if let Some(matches) = matches.subcommand_matches("verify") {
- let mut tuf = Tuf::new(config)?;
- cmd_verify(&mut tuf, matches.value_of("target").unwrap())
- } else {
- unreachable!() // because of AppSettings::SubcommandRequiredElseHelp
- }
-}
-
-fn url_validator(url: String) -> Result<(), String> {
- Url::parse(&url)
- .map(|_| ())
- .map_err(|_| "URL was not valid".into())
-}
-
-fn parser<'a, 'b>() -> App<'a, 'b> {
- App::new("tuf")
- .version(env!("CARGO_PKG_VERSION"))
- .about("CLI tool for verifying TUF metadata and downloading targets")
- .settings(&[AppSettings::SubcommandRequiredElseHelp])
- .arg(Arg::with_name("verbose")
- .short("v")
- .long("verbose")
- .multiple(true)
- .help("Increase the verbosity of output to stderr"))
- .arg(Arg::with_name("url")
- .short("U")
- .long("url")
- .takes_value(true)
- .validator(url_validator)
- .help("URL of the TUF repo"))
- .arg(Arg::with_name("file")
- .short("f")
- .long("file")
- .takes_value(true)
- .help("Path to the TUF repo (remote)"))
- .arg(Arg::with_name("path")
- .short("p")
- .long("path")
- .takes_value(true)
- .required(true)
- .help("Local path the TUF repo"))
- .group(ArgGroup::with_name("remote_repo")
- .args(&["url", "file"])
- .required(true))
- .subcommand(SubCommand::with_name("fetch").about("Fetch a target")
- .arg(Arg::with_name("target")
- .takes_value(true)
- .required(true)
- .help("The full (non-local) path of the target to verify")))
- .subcommand(SubCommand::with_name("init").about("Initializes a new TUF repo"))
- .subcommand(SubCommand::with_name("update").about("Updates metadata from remotes"))
- .subcommand(SubCommand::with_name("verify")
- .about("Verifies a target")
- .arg(Arg::with_name("target")
- .takes_value(true)
- .required(true)
- .help("The full (non-local) path of the target to verify")))
-}
-
-fn cmd_fetch(tuf: &Tuf, target: &str) -> Result<(), Error> {
- tuf.fetch_target(target)
- .map(|_| ())
-}
-
-fn cmd_init(local_path: &PathBuf) -> Result<(), Error> {
- Tuf::initialize(local_path)
-}
-
-fn cmd_update(tuf: &mut Tuf) -> Result<(), Error> {
- tuf.update()
-}
-
-fn cmd_verify(tuf: &mut Tuf, target: &str) -> Result<(), Error> {
- tuf.fetch_target(target)
- .map(|_| ())
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
- use std::fs::{self, DirBuilder};
- use std::path::{Path, PathBuf};
- use tempdir::TempDir;
-
- fn vector_path() -> PathBuf {
- PathBuf::from(env!("CARGO_MANIFEST_DIR"))
- .join("tests")
- .join("tuf-test-vectors")
- .join("tuf")
- .join("001")
- .join("repo")
- }
-
- #[test]
- fn test_clap() {
- let _ = parser();
- }
-
- fn init_temp(temp: &Path) {
- let dir = PathBuf::from("metadata").join("current");
- DirBuilder::new()
- .recursive(true)
- .create(temp.join(dir.clone()))
- .expect(&format!("couldn't create path {}:", temp.join(dir).to_string_lossy()));
-
- let copy_path = vector_path().join("root.json");
- fs::copy(copy_path,
- temp.join("metadata").join("current").join("root.json"))
- .expect(&format!("copy failed for target"));
- }
-
- #[test]
- fn run_it() {
- let temp = TempDir::new("rust-tuf").expect("couldn't make temp dir");
- init_temp(temp.path());
- let path = vector_path();
- println!("Temp path: {:?}", temp.path());
- println!("Test path: {:?}", path);
-
- let matches = parser()
- .get_matches_from_safe(vec!["tuf",
- "--file",
- &path.to_string_lossy(),
- "--path",
- temp.path().to_str().expect("path not utf-8"),
- "init"])
- .expect("parse error");
- assert_eq!(run_main(matches), Ok(()));
-
- let matches = parser()
- .get_matches_from_safe(vec!["tuf",
- "--file",
- &path.to_string_lossy(),
- "--path",
- temp.path().to_str().expect("path not utf-8"),
- "update"])
- .expect("parse error");
- assert_eq!(run_main(matches), Ok(()));
-
- let matches = parser()
- .get_matches_from_safe(vec!["tuf",
- "--file",
- &path.to_string_lossy(),
- "--path",
- temp.path().to_str().expect("path not utf-8"),
- "fetch",
- "targets/file.txt"])
- .expect("parse error");
- assert_eq!(run_main(matches), Ok(()));
-
- let matches = parser()
- .get_matches_from_safe(vec!["tuf",
- "--file",
- &path.to_string_lossy(),
- "--path",
- temp.path().to_str().expect("path not utf-8"),
- "verify",
- "targets/file.txt"])
- .expect("parse error");
- assert_eq!(run_main(matches), Ok(()));
- }
-}
diff --git a/src/metadata.rs b/src/metadata.rs
index 7d77a93..406e2ab 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -1,890 +1,1275 @@
-use chrono::{DateTime, UTC};
-use data_encoding::HEXLOWER;
-use json;
-use pem;
-use ring;
-use ring::digest::{digest, SHA256};
-use ring::signature::{ED25519, RSA_PSS_2048_8192_SHA256, RSA_PSS_2048_8192_SHA512};
+//! Structures used to represent TUF metadata.
+
+use chrono::DateTime;
+use chrono::offset::Utc;
+use ring::digest::{self, SHA256, SHA512};
use serde::de::{Deserialize, DeserializeOwned, Deserializer, Error as DeserializeError};
-use std::collections::HashMap;
-use std::fmt::{self, Display, Formatter, Debug};
+use serde::ser::{Serialize, Serializer, Error as SerializeError};
+use std::collections::{HashMap, HashSet};
+use std::fmt::{self, Debug, Display};
+use std::io::Read;
use std::marker::PhantomData;
-use std::str::FromStr;
-use untrusted::Input;
-use cjson::canonicalize;
+use Result;
+use crypto::{KeyId, PublicKey, Signature, HashAlgorithm, HashValue, SignatureScheme, PrivateKey};
use error::Error;
-use rsa::convert_to_pkcs1;
+use interchange::DataInterchange;
+use shims;
-static HASH_PREFERENCES: &'static [HashType] = &[HashType::Sha512, HashType::Sha256];
+static PATH_ILLEGAL_COMPONENTS: &'static [&str] = &[
+ "", // empty
+ ".", // current dir
+ "..", // parent dir
+ // TODO ? "0", // may translate to nul in windows
+];
-#[derive(Eq, PartialEq, Deserialize, Debug, Clone)]
-pub enum Role {
- Root,
- Targets,
- Timestamp,
- Snapshot,
- TargetsDelegation(String),
+static PATH_ILLEGAL_COMPONENTS_CASE_INSENSITIVE: &'static [&str] = &[
+ // DOS device files
+ "CON",
+ "PRN",
+ "AUX",
+ "NUL",
+ "COM1",
+ "COM2",
+ "COM3",
+ "COM4",
+ "COM5",
+ "COM6",
+ "COM7",
+ "COM8",
+ "COM9",
+ "LPT1",
+ "LPT2",
+ "LPT3",
+ "LPT4",
+ "LPT5",
+ "LPT6",
+ "LPT7",
+ "LPT8",
+ "LPT9",
+ "KEYBD$",
+ "CLOCK$",
+ "SCREEN$",
+ "$IDLE$",
+ "CONFIG$",
+];
+
+static PATH_ILLEGAL_STRINGS: &'static [&str] = &[
+ "\\", // for windows compatibility
+ "<",
+ ">",
+ "\"",
+ "|",
+ "?",
+ "*",
+ // control characters, all illegal in FAT
+ "\u{000}",
+ "\u{001}",
+ "\u{002}",
+ "\u{003}",
+ "\u{004}",
+ "\u{005}",
+ "\u{006}",
+ "\u{007}",
+ "\u{008}",
+ "\u{009}",
+ "\u{00a}",
+ "\u{00b}",
+ "\u{00c}",
+ "\u{00d}",
+ "\u{00e}",
+ "\u{00f}",
+ "\u{010}",
+ "\u{011}",
+ "\u{012}",
+ "\u{013}",
+ "\u{014}",
+ "\u{015}",
+ "\u{016}",
+ "\u{017}",
+ "\u{018}",
+ "\u{019}",
+ "\u{01a}",
+ "\u{01b}",
+ "\u{01c}",
+ "\u{01d}",
+ "\u{01e}",
+ "\u{01f}",
+ "\u{07f}",
+];
+
+fn safe_path(path: &str) -> Result<()> {
+ if path.starts_with("/") {
+ return Err(Error::IllegalArgument("Cannot start with '/'".into()));
+ }
+
+ for bad_str in PATH_ILLEGAL_STRINGS {
+ if path.contains(bad_str) {
+ return Err(Error::IllegalArgument(
+ format!("Path cannot contain {:?}", bad_str),
+ ));
+ }
+ }
+
+ for component in path.split('/') {
+ for bad_str in PATH_ILLEGAL_COMPONENTS {
+ if component == *bad_str {
+ return Err(Error::IllegalArgument(
+ format!("Path cannot have component {:?}", component),
+ ));
+ }
+ }
+
+ let component_upper = component.to_uppercase();
+ for bad_str in PATH_ILLEGAL_COMPONENTS_CASE_INSENSITIVE {
+ if component_upper.as_str() == *bad_str {
+ return Err(Error::IllegalArgument(
+ format!("Path cannot have component {:?}", component),
+ ));
+ }
+ }
+ }
+
+ Ok(())
}
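+
+`safe_path` is private to this module, so the following checks are illustrative rather than a doctest; the `MetadataPath::new` and `TargetPath::new` doctests below exercise the same rules through the public API:
+
+```rust
+// Accepted: plain relative paths.
+assert!(safe_path("foo/bar").is_ok());
+
+// Rejected: absolute paths, traversal, Windows separators, device names.
+assert!(safe_path("/foo").is_err());
+assert!(safe_path("foo/../bar").is_err());
+assert!(safe_path("foo\\bar").is_err());
+assert!(safe_path("nul").is_err()); // case-insensitive DOS device check
+```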
-impl FromStr for Role {
- type Err = Error;
+/// The TUF role.
+#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+pub enum Role {
+ /// The root role.
+ #[serde(rename = "root")]
+ Root,
+ /// The snapshot role.
+ #[serde(rename = "snapshot")]
+ Snapshot,
+ /// The targets role.
+ #[serde(rename = "targets")]
+ Targets,
+ /// The timestamp role.
+ #[serde(rename = "timestamp")]
+ Timestamp,
+}
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s {
- "Root" => Ok(Role::Root),
- "Snapshot" => Ok(Role::Snapshot),
- "Targets" => Ok(Role::Targets),
- "Timestamp" => Ok(Role::Timestamp),
- role => Err(Error::UnknownRole(String::from(role))),
+impl Role {
+ /// Check if this role could be associated with a given path.
+ ///
+ /// ```
+ /// use tuf::metadata::{MetadataPath, Role};
+ ///
+ /// assert!(Role::Root.fuzzy_matches_path(&MetadataPath::from_role(&Role::Root)));
+ /// assert!(Role::Snapshot.fuzzy_matches_path(&MetadataPath::from_role(&Role::Snapshot)));
+ /// assert!(Role::Targets.fuzzy_matches_path(&MetadataPath::from_role(&Role::Targets)));
+ /// assert!(Role::Timestamp.fuzzy_matches_path(&MetadataPath::from_role(&Role::Timestamp)));
+ ///
+ /// assert!(!Role::Root.fuzzy_matches_path(&MetadataPath::from_role(&Role::Snapshot)));
+ /// assert!(!Role::Root.fuzzy_matches_path(&MetadataPath::new("wat".into()).unwrap()));
+ /// ```
+ pub fn fuzzy_matches_path(&self, path: &MetadataPath) -> bool {
+ match self {
+ &Role::Root if &path.0 == "root" => true,
+ &Role::Snapshot if &path.0 == "snapshot" => true,
+ &Role::Timestamp if &path.0 == "timestamp" => true,
+ &Role::Targets if &path.0 == "targets" => true,
+ // TODO delegation support
+ _ => false,
}
}
}
impl Display for Role {
- fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- match *self {
- Role::Root => write!(f, "{}", "root"),
- Role::Targets => write!(f, "{}", "targets"),
- Role::Snapshot => write!(f, "{}", "snapshot"),
- Role::Timestamp => write!(f, "{}", "timestamp"),
- Role::TargetsDelegation(ref s) => write!(f, "{}", s),
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ &Role::Root => write!(f, "root"),
+ &Role::Snapshot => write!(f, "snapshot"),
+ &Role::Targets => write!(f, "targets"),
+ &Role::Timestamp => write!(f, "timestamp"),
}
}
}
-pub trait RoleType: Debug + Clone{
- fn matches(role: &Role) -> bool;
+/// Enum used for addressing versioned TUF metadata.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub enum MetadataVersion {
+ /// The metadata is unversioned.
+ None,
+ /// The metadata is addressed by a specific version number.
+ Number(u32),
+ /// The metadata is addressed by a hash prefix. Used with TUF's consistent snapshot feature.
+ Hash(HashValue),
}
-#[derive(Debug, Clone)]
-pub struct Root {}
-impl RoleType for Root {
- fn matches(role: &Role) -> bool {
- match role {
- &Role::Root => true,
- _ => false,
+impl MetadataVersion {
+ /// Converts this struct into the string used for addressing metadata.
+ pub fn prefix(&self) -> String {
+ match self {
+ &MetadataVersion::None => String::new(),
+ &MetadataVersion::Number(ref x) => format!("{}.", x),
+ &MetadataVersion::Hash(ref v) => format!("{}.", v),
}
}
}
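+
+For reference, the prefixes this yields (a sketch; the `Hash` variant formats the hash's `Display` output, followed by a dot):
+
+```rust
+assert_eq!(MetadataVersion::None.prefix(), "");
+assert_eq!(MetadataVersion::Number(42).prefix(), "42.");
+```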
-#[derive(Debug, Clone)]
-pub struct Targets {}
-impl RoleType for Targets {
- fn matches(role: &Role) -> bool {
- match role {
- &Role::Targets => true,
- _ => false,
- }
+/// Top level trait used for role metadata.
+pub trait Metadata: Debug + PartialEq + Serialize + DeserializeOwned {
+ /// The role associated with the metadata.
+ fn role() -> Role;
+}
+
+/// A piece of raw metadata with attached signatures.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct SignedMetadata<D, M>
+where
+ D: DataInterchange,
+ M: Metadata,
+{
+ signatures: Vec<Signature>,
+ signed: D::RawData,
+ #[serde(skip_serializing, skip_deserializing)]
+ _interchange: PhantomData<D>,
+ #[serde(skip_serializing, skip_deserializing)]
+ _metadata: PhantomData<M>,
+}
+
+impl<D, M> SignedMetadata<D, M>
+where
+ D: DataInterchange,
+ M: Metadata,
+{
+ /// Create a new `SignedMetadata`.
+ pub fn new(
+ metadata: &M,
+ private_key: &PrivateKey,
+ scheme: SignatureScheme,
+ ) -> Result<SignedMetadata<D, M>> {
+ let raw = D::serialize(metadata)?;
+ let bytes = D::canonicalize(&raw)?;
+ let sig = private_key.sign(&bytes, scheme)?;
+ Ok(SignedMetadata {
+ signatures: vec![sig],
+ signed: raw,
+ _interchange: PhantomData,
+ _metadata: PhantomData,
+ })
}
-}
-#[derive(Debug, Clone)]
-pub struct Timestamp {}
-impl RoleType for Timestamp {
- fn matches(role: &Role) -> bool {
- match role {
- &Role::Timestamp => true,
- _ => false,
- }
+ /// An immutable reference to the signatures.
+ pub fn signatures(&self) -> &[Signature] {
+ &self.signatures
}
-}
-#[derive(Debug, Clone)]
-pub struct Snapshot {}
-impl RoleType for Snapshot {
- fn matches(role: &Role) -> bool {
- match role {
- &Role::Snapshot => true,
- _ => false,
- }
+ /// A mutable reference to the signatures.
+ pub fn signatures_mut(&mut self) -> &mut Vec<Signature> {
+ &mut self.signatures
}
-}
-#[derive(Debug, Clone)]
-pub struct SignedMetadata<R: RoleType + Clone> {
- pub signatures: Vec<Signature>,
- pub signed: json::Value,
- _role: PhantomData<R>,
-}
+ /// An immutable reference to the raw data.
+ pub fn signed(&self) -> &D::RawData {
+ &self.signed
+ }
-impl<'de, R: RoleType> Deserialize<'de> for SignedMetadata<R> {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- match (object.remove("signatures"), object.remove("signed")) {
- (Some(a @ json::Value::Array(_)), Some(v @ json::Value::Object(_))) => {
- Ok(SignedMetadata::<R> {
- signatures: json::from_value(a).map_err(|e| {
- DeserializeError::custom(format!("Bad signature data: {}", e))
- })?,
- signed: v.clone(),
- _role: PhantomData,
- })
+ /// Verify this metadata.
+ pub fn verify(
+ &self,
+ threshold: u32,
+ authorized_key_ids: &HashSet<KeyId>,
+ available_keys: &HashMap<KeyId, PublicKey>,
+ ) -> Result<()> {
+ if self.signatures.len() < 1 {
+ return Err(Error::VerificationFailure(
+ "The metadata was not signed with any authorized keys."
+ .into(),
+ ));
+ }
+
+ if threshold < 1 {
+ return Err(Error::VerificationFailure(
+ "Threshold must be strictly greater than zero".into(),
+ ));
+ }
+
+ let canonical_bytes = D::canonicalize(&self.signed)?;
+
+ let mut signatures_needed = threshold;
+ for sig in self.signatures.iter() {
+ if !authorized_key_ids.contains(sig.key_id()) {
+ warn!(
+ "Key ID {:?} is not authorized to sign this metadata.",
+ sig.key_id()
+ );
+ continue;
+ }
+
+ match available_keys.get(sig.key_id()) {
+ Some(ref pub_key) => {
+ match pub_key.verify(&canonical_bytes, &sig) {
+ Ok(()) => {
+ debug!("Good signature from key ID {:?}", pub_key.key_id());
+ signatures_needed -= 1;
+ }
+ Err(e) => {
+ warn!("Bad signature from key ID {:?}: {:?}", pub_key.key_id(), e);
+ }
+ }
}
- _ => {
- Err(DeserializeError::custom("Metadata missing 'signed' or 'signatures' \
- section"))
+ None => {
+ warn!(
+ "Key ID {:?} was not found in the set of available keys.",
+ sig.key_id()
+ );
}
}
+ if signatures_needed == 0 {
+ break;
+ }
+ }
+
+ if signatures_needed == 0 {
+ Ok(())
} else {
- Err(DeserializeError::custom("Metadata was not an object"))
+ Err(Error::VerificationFailure(format!(
+ "Signature threshold not met: {}/{}",
+ threshold - signatures_needed,
+ threshold
+ )))
}
}
}
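+
+The control flow in `verify` is dominated by logging; stripped to its core, the threshold rule behaves like the following standalone model (the names are illustrative, not crate API, and it assumes `threshold >= 1` as `verify` enforces up front):
+
+```rust
+use std::collections::HashSet;
+
+// Walk signatures in order; each one whose key ID is authorized (and whose
+// signature would check out) reduces the outstanding count. Success means
+// the count hit zero before the signatures ran out.
+fn threshold_met(sig_key_ids: &[&str], authorized: &HashSet<&str>, threshold: u32) -> bool {
+    let mut needed = threshold;
+    for key_id in sig_key_ids {
+        if authorized.contains(key_id) {
+            needed = needed.saturating_sub(1);
+        }
+        if needed == 0 {
+            return true;
+        }
+    }
+    false
+}
+
+fn main() {
+    let authorized: HashSet<&str> = ["key-a", "key-b"].iter().cloned().collect();
+    assert!(threshold_met(&["key-a", "key-b"], &authorized, 2));
+    assert!(!threshold_met(&["key-a", "key-x"], &authorized, 2)); // only 1 of 2
+}
+```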
-pub trait Metadata<R: RoleType>: DeserializeOwned {
- fn expires(&self) -> &DateTime<UTC>;
-}
-
-
+/// Metadata for the root role.
#[derive(Debug, PartialEq)]
pub struct RootMetadata {
+ version: u32,
+ expires: DateTime<Utc>,
consistent_snapshot: bool,
- expires: DateTime<UTC>,
- pub version: i32,
- pub keys: HashMap<KeyId, Key>,
- pub root: RoleDefinition,
- pub targets: RoleDefinition,
- pub timestamp: RoleDefinition,
- pub snapshot: RoleDefinition,
+ keys: HashMap<KeyId, PublicKey>,
+ root: RoleDefinition,
+ snapshot: RoleDefinition,
+ targets: RoleDefinition,
+ timestamp: RoleDefinition,
}
-impl Metadata<Root> for RootMetadata {
- fn expires(&self) -> &DateTime<UTC> {
+impl RootMetadata {
+ /// Create new `RootMetadata`.
+ pub fn new(
+ version: u32,
+ expires: DateTime<Utc>,
+ consistent_snapshot: bool,
+ mut keys: Vec<PublicKey>,
+ root: RoleDefinition,
+ snapshot: RoleDefinition,
+ targets: RoleDefinition,
+ timestamp: RoleDefinition,
+ ) -> Result<Self> {
+ if version < 1 {
+ return Err(Error::IllegalArgument(format!(
+ "Metadata version must be greater than zero. Found: {}",
+ version
+ )));
+ }
+
+ let keys = keys.drain(0..)
+ .map(|k| (k.key_id().clone(), k))
+ .collect::<HashMap<KeyId, PublicKey>>();
+
+ Ok(RootMetadata {
+ version: version,
+ expires: expires,
+ consistent_snapshot: consistent_snapshot,
+ keys: keys,
+ root: root,
+ snapshot: snapshot,
+ targets: targets,
+ timestamp: timestamp,
+ })
+ }
+
+ /// The version number.
+ pub fn version(&self) -> u32 {
+ self.version
+ }
+
+ /// An immutable reference to the metadata's expiration `DateTime`.
+ pub fn expires(&self) -> &DateTime<Utc> {
&self.expires
}
+
+ /// Whether or not this repository currently implements the TUF consistent snapshot
+ /// feature.
+ pub fn consistent_snapshot(&self) -> bool {
+ self.consistent_snapshot
+ }
+
+ /// An immutable reference to the map of trusted keys.
+ pub fn keys(&self) -> &HashMap<KeyId, PublicKey> {
+ &self.keys
+ }
+
+ /// An immutable reference to the root role's definition.
+ pub fn root(&self) -> &RoleDefinition {
+ &self.root
+ }
+
+ /// An immutable reference to the snapshot role's definition.
+ pub fn snapshot(&self) -> &RoleDefinition {
+ &self.snapshot
+ }
+
+ /// An immutable reference to the targets role's definition.
+ pub fn targets(&self) -> &RoleDefinition {
+ &self.targets
+ }
+
+ /// An immutable reference to the timestamp role's definition.
+ pub fn timestamp(&self) -> &RoleDefinition {
+ &self.timestamp
+ }
+}
+
+impl Metadata for RootMetadata {
+ fn role() -> Role {
+ Role::Root
+ }
+}
+
+impl Serialize for RootMetadata {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let m = shims::RootMetadata::from(self).map_err(|e| {
+ SerializeError::custom(format!("{:?}", e))
+ })?;
+ m.serialize(ser)
+ }
}
impl<'de> Deserialize<'de> for RootMetadata {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- let typ = json::from_value::<Role>(object.remove("_type")
- .ok_or_else(|| DeserializeError::custom("Field '_type' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Field '_type' not a valid role: {}", e))
- })?;
-
- if typ != Role::Root {
- return Err(DeserializeError::custom("Field '_type' was not 'Root'"));
- }
-
- let keys = json::from_value(object.remove("keys")
- .ok_or_else(|| DeserializeError::custom("Field 'keys' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'keys' not a valid key map: {}", e))
- })?;
-
- let expires = json::from_value(object.remove("expires")
- .ok_or_else(|| DeserializeError::custom("Field 'expires' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'expires' did not have a valid format: {}", e))
- })?;
-
- let version = json::from_value(object.remove("version")
- .ok_or_else(|| DeserializeError::custom("Field 'version' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'version' did not have a valid format: {}", e))
- })?;
-
- let consistent_snapshot = json::from_value(object.remove("consistent_snapshot")
- .ok_or_else(|| DeserializeError::custom("Field 'consistent_snapshot' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'consistent_snapshot' did not have a valid format: {}", e))
- })?;
-
- let mut roles = object.remove("roles")
- .and_then(|v| match v {
- json::Value::Object(o) => Some(o),
- _ => None,
- })
- .ok_or_else(|| DeserializeError::custom("Field 'roles' missing"))?;
-
- let root = json::from_value(roles.remove("root")
- .ok_or_else(|| DeserializeError::custom("Role 'root' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Root role definition error: {}", e))
- })?;
-
- let targets = json::from_value(roles.remove("targets")
- .ok_or_else(|| DeserializeError::custom("Role 'targets' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Targets role definition error: {}", e))
- })?;
-
- let timestamp = json::from_value(roles.remove("timestamp")
- .ok_or_else(|| DeserializeError::custom("Role 'timestamp' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Timetamp role definition error: {}", e))
- })?;
-
- let snapshot = json::from_value(roles.remove("snapshot")
- .ok_or_else(|| DeserializeError::custom("Role 'shapshot' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Snapshot role definition error: {}", e))
- })?;
-
- Ok(RootMetadata {
- consistent_snapshot,
- expires: expires,
- version: version,
- keys: keys,
- root: root,
- targets: targets,
- timestamp: timestamp,
- snapshot: snapshot,
- })
- } else {
- Err(DeserializeError::custom("Role was not an object"))
- }
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::RootMetadata = Deserialize::deserialize(de)?;
+ intermediate.try_into().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
}
}
-#[derive(Clone, PartialEq, Debug)]
+/// The definition of what allows a role to be trusted.
+#[derive(Clone, Debug, PartialEq)]
pub struct RoleDefinition {
- pub key_ids: Vec<KeyId>,
- pub threshold: i32,
+ threshold: u32,
+ key_ids: HashSet<KeyId>,
+}
+
+impl RoleDefinition {
+ /// Create a new `RoleDefinition` with a given threshold and set of authorized `KeyID`s.
+ pub fn new(threshold: u32, key_ids: HashSet<KeyId>) -> Result<Self> {
+ if threshold < 1 {
+ return Err(Error::IllegalArgument(format!("Threshold: {}", threshold)));
+ }
+
+ if key_ids.is_empty() {
+ return Err(Error::IllegalArgument(
+ "Cannot define a role with no associated key IDs".into(),
+ ));
+ }
+
+ if (key_ids.len() as u64) < (threshold as u64) {
+ return Err(Error::IllegalArgument(format!(
+ "Cannot have a threshold greater than the number of associated key IDs. {} vs. {}",
+ threshold,
+ key_ids.len()
+ )));
+ }
+
+ Ok(RoleDefinition {
+ threshold: threshold,
+ key_ids: key_ids,
+ })
+ }
+
+ /// The threshold number of signatures required for the role to be trusted.
+ pub fn threshold(&self) -> u32 {
+ self.threshold
+ }
+
+ /// An immutable reference to the set of `KeyID`s that are authorized to sign the role.
+ pub fn key_ids(&self) -> &HashSet<KeyId> {
+ &self.key_ids
+ }
+}
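+
+A sketch of building a definition, using `KeyId::from_string` as shown in the crate-level docs and the `hashset!` macro from the `maplit` dev-dependency:
+
+```rust
+let key_ids = hashset! {
+    KeyId::from_string("diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=").unwrap(),
+};
+let definition = RoleDefinition::new(1, key_ids).unwrap();
+assert_eq!(definition.threshold(), 1);
+
+// A threshold larger than the key set can never be met, so it is rejected.
+assert!(RoleDefinition::new(2, definition.key_ids().clone()).is_err());
+```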
+
+impl Serialize for RoleDefinition {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ shims::RoleDefinition::from(self)
+ .map_err(|e| SerializeError::custom(format!("{:?}", e)))?
+ .serialize(ser)
+ }
}
impl<'de> Deserialize<'de> for RoleDefinition {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- let key_ids = json::from_value(object.remove("keyids")
- .ok_or_else(|| DeserializeError::custom("Field 'keyids' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'keyids' not a valid array: {}", e))
- })?;
-
- let threshold = json::from_value(object.remove("threshold")
- .ok_or_else(|| DeserializeError::custom("Field 'threshold' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'threshold' not a an int: {}", e))
- })?;
-
- if threshold <= 0 {
- return Err(DeserializeError::custom("'threshold' must be >= 1"));
- }
-
-
- Ok(RoleDefinition {
- key_ids: key_ids,
- threshold: threshold,
- })
- } else {
- Err(DeserializeError::custom("Role definition was not an object"))
- }
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::RoleDefinition = Deserialize::deserialize(de)?;
+ intermediate.try_into().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
}
}
-#[derive(Debug, Clone)]
-pub struct TargetsMetadata {
- expires: DateTime<UTC>,
- pub version: i32,
- pub delegations: Option<Delegations>,
- pub targets: HashMap<String, TargetInfo>,
-}
+/// Wrapper for a path to metadata.
+#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize)]
+pub struct MetadataPath(String);
-impl Metadata<Targets> for TargetsMetadata {
- fn expires(&self) -> &DateTime<UTC> {
- &self.expires
+impl MetadataPath {
+ /// Create a new `MetadataPath` from a `String`.
+ ///
+ /// ```
+ /// use tuf::metadata::MetadataPath;
+ ///
+ /// assert!(MetadataPath::new("foo".into()).is_ok());
+ /// assert!(MetadataPath::new("/foo".into()).is_err());
+ /// assert!(MetadataPath::new("../foo".into()).is_err());
+ /// assert!(MetadataPath::new("foo/".into()).is_err());
+ /// assert!(MetadataPath::new("foo/..".into()).is_err());
+ /// assert!(MetadataPath::new("foo/../bar".into()).is_err());
+ /// assert!(MetadataPath::new("..foo".into()).is_ok());
+ /// assert!(MetadataPath::new("foo//bar".into()).is_err());
+ /// assert!(MetadataPath::new("foo/..bar".into()).is_ok());
+ /// assert!(MetadataPath::new("foo/bar..".into()).is_ok());
+ /// ```
+ pub fn new(path: String) -> Result<Self> {
+ safe_path(&path)?;
+ Ok(MetadataPath(path))
+ }
+
+ /// Create a metadata path from the given role.
+ /// Create a metadata path from the given role.
+ ///
+ /// use tuf::metadata::{Role, MetadataPath};
+ ///
+ /// assert_eq!(MetadataPath::from_role(&Role::Root),
+ /// MetadataPath::new("root".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Snapshot),
+ /// MetadataPath::new("snapshot".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Targets),
+ /// MetadataPath::new("targets".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Timestamp),
+ /// MetadataPath::new("timestamp".into()).unwrap());
+ /// ```
+ pub fn from_role(role: &Role) -> Self {
+ Self::new(format!("{}", role)).unwrap()
+ }
+
+ /// Split `MetadataPath` into components that can be joined to create URL paths, Unix paths, or
+ /// Windows paths.
+ ///
+ /// ```
+ /// use tuf::crypto::HashValue;
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::metadata::{MetadataPath, MetadataVersion};
+ ///
+ /// let path = MetadataPath::new("foo/bar".into()).unwrap();
+ /// assert_eq!(path.components::<JsonDataInterchange>(&MetadataVersion::None),
+ /// ["foo".to_string(), "bar.json".to_string()]);
+ /// assert_eq!(path.components::<JsonDataInterchange>(&MetadataVersion::Number(1)),
+ /// ["foo".to_string(), "1.bar.json".to_string()]);
+ /// assert_eq!(path.components::<JsonDataInterchange>(
+ /// &MetadataVersion::Hash(HashValue::new(vec![0x69, 0xb7, 0x1d]))),
+ /// ["foo".to_string(), "abcd.bar.json".to_string()]);
+ /// ```
+ pub fn components<D>(&self, version: &MetadataVersion) -> Vec<String>
+ where
+ D: DataInterchange,
+ {
+ let mut buf: Vec<String> = self.0.split('/').map(|s| s.to_string()).collect();
+ let len = buf.len();
+ buf[len - 1] = format!("{}{}.{}", version.prefix(), buf[len - 1], D::extension());
+ buf
}
}
-impl<'de> Deserialize<'de> for TargetsMetadata {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- let delegations = match object.remove("delegations") {
- // TODO this should accept null / empty object too
- // currently the options are "not present at all" or "completely correct"
- // and everything else errors out
- Some(value) => {
- Some(json::from_value(value).map_err(|e| {
- DeserializeError::custom(format!("Bad delegations format: {}", e))
- })?)
- }
- None => None,
- };
-
- let expires = json::from_value(object.remove("expires")
- .ok_or_else(|| DeserializeError::custom("Field 'expires' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'expires did not have a valid format: {}", e))
- })?;
-
- let version = json::from_value(object.remove("version")
- .ok_or_else(|| DeserializeError::custom("Field 'version' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'version' did not have a valid format: {}", e))
- })?;
-
- match object.remove("targets") {
- Some(t) => {
- let targets =
- json::from_value(t).map_err(|e| {
- DeserializeError::custom(format!("Bad targets format: {}", e))
- })?;
-
- Ok(TargetsMetadata {
- version: version,
- expires: expires,
- delegations: delegations,
- targets: targets,
- })
- }
- _ => Err(DeserializeError::custom("Signature missing fields".to_string())),
- }
- } else {
- Err(DeserializeError::custom("Role was not an object"))
- }
+impl ToString for MetadataPath {
+ fn to_string(&self) -> String {
+ self.0.clone()
}
}
+impl<'de> Deserialize<'de> for MetadataPath {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let s: String = Deserialize::deserialize(de)?;
+ MetadataPath::new(s).map_err(|e| DeserializeError::custom(format!("{:?}", e)))
+ }
+}
-#[derive(Debug)]
+/// Metadata for the timestamp role.
+#[derive(Debug, PartialEq)]
pub struct TimestampMetadata {
- expires: DateTime<UTC>,
- pub version: i32,
- pub meta: HashMap<String, MetadataMetadata>,
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<MetadataPath, MetadataDescription>,
}
-impl Metadata<Timestamp> for TimestampMetadata {
- fn expires(&self) -> &DateTime<UTC> {
+impl TimestampMetadata {
+ /// Create new `TimestampMetadata`.
+ pub fn new(
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<MetadataPath, MetadataDescription>,
+ ) -> Result<Self> {
+ if version < 1 {
+ return Err(Error::IllegalArgument(format!(
+ "Metadata version must be greater than zero. Found: {}",
+ version
+ )));
+ }
+
+ Ok(TimestampMetadata {
+ version: version,
+ expires: expires,
+ meta: meta,
+ })
+ }
+
+ /// The version number.
+ pub fn version(&self) -> u32 {
+ self.version
+ }
+
+ /// An immutable reference to the metadata's expiration `DateTime`.
+ pub fn expires(&self) -> &DateTime<Utc> {
&self.expires
}
+
+ /// An immutable reference to the metadata paths and descriptions.
+ pub fn meta(&self) -> &HashMap<MetadataPath, MetadataDescription> {
+ &self.meta
+ }
+}
+
+impl Metadata for TimestampMetadata {
+ fn role() -> Role {
+ Role::Timestamp
+ }
+}
+
+impl Serialize for TimestampMetadata {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ shims::TimestampMetadata::from(self)
+ .map_err(|e| SerializeError::custom(format!("{:?}", e)))?
+ .serialize(ser)
+ }
}
impl<'de> Deserialize<'de> for TimestampMetadata {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
-
- let expires = json::from_value(object.remove("expires")
- .ok_or_else(|| DeserializeError::custom("Field 'expires' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'expires' did not have a valid format: {}", e))
- })?;
-
- let version = json::from_value(object.remove("version")
- .ok_or_else(|| DeserializeError::custom("Field 'version' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'version' did not have a valid format: {}", e))
- })?;
-
- match object.remove("meta") {
- Some(m) => {
- let meta = json::from_value(m).map_err(|e| {
- DeserializeError::custom(format!("Bad meta-meta format: {}", e))
- })?;
-
- Ok(TimestampMetadata {
- expires: expires,
- version: version,
- meta: meta,
- })
- }
- _ => Err(DeserializeError::custom("Signature missing fields".to_string())),
- }
- } else {
- Err(DeserializeError::custom("Role was not an object"))
- }
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::TimestampMetadata = Deserialize::deserialize(de)?;
+ intermediate.try_into().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
}
}
-
-#[derive(Debug)]
-pub struct SnapshotMetadata {
- expires: DateTime<UTC>,
- pub version: i32,
- pub meta: HashMap<String, SnapshotMetadataMetadata>,
+/// Description of a piece of metadata, used in verification.
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct MetadataDescription {
+ version: u32,
}
-impl Metadata<Snapshot> for SnapshotMetadata {
- fn expires(&self) -> &DateTime<UTC> {
+impl MetadataDescription {
+ /// Create a new `MetadataDescription`.
+ pub fn new(version: u32) -> Result<Self> {
+ if version < 1 {
+ return Err(Error::IllegalArgument(format!(
+ "Metadata version must be greater than zero. Found: {}",
+ version
+ )));
+ }
+
+ Ok(MetadataDescription { version: version })
+ }
+
+ /// The version of the described metadata.
+ pub fn version(&self) -> u32 {
+ self.version
+ }
+}
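+
+As with the other constructors in this module, version numbers start at one; a minimal check:
+
+```rust
+assert!(MetadataDescription::new(0).is_err());
+assert_eq!(MetadataDescription::new(1).unwrap().version(), 1);
+```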
+
+/// Metadata for the snapshot role.
+#[derive(Debug, PartialEq)]
+pub struct SnapshotMetadata {
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<MetadataPath, MetadataDescription>,
+}
+
+impl SnapshotMetadata {
+ /// Create new `SnapshotMetadata`.
+ pub fn new(
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<MetadataPath, MetadataDescription>,
+ ) -> Result<Self> {
+ if version < 1 {
+ return Err(Error::IllegalArgument(format!(
+ "Metadata version must be greater than zero. Found: {}",
+ version
+ )));
+ }
+
+ Ok(SnapshotMetadata {
+ version: version,
+ expires: expires,
+ meta: meta,
+ })
+ }
+
+ /// The version number.
+ pub fn version(&self) -> u32 {
+ self.version
+ }
+
+ /// An immutable reference to the metadata's expiration `DateTime`.
+ pub fn expires(&self) -> &DateTime<Utc> {
&self.expires
}
+
+ /// An immutable reference to the metadata paths and descriptions.
+ pub fn meta(&self) -> &HashMap<MetadataPath, MetadataDescription> {
+ &self.meta
+ }
+}
+
+impl Metadata for SnapshotMetadata {
+ fn role() -> Role {
+ Role::Snapshot
+ }
+}
+
+impl Serialize for SnapshotMetadata {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ shims::SnapshotMetadata::from(self)
+ .map_err(|e| SerializeError::custom(format!("{:?}", e)))?
+ .serialize(ser)
+ }
}
impl<'de> Deserialize<'de> for SnapshotMetadata {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- let expires = json::from_value(object.remove("expires")
- .ok_or_else(|| DeserializeError::custom("Field 'expires' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'expires' did not have a valid format: {}", e))
- })?;
-
- let version = json::from_value(object.remove("version")
- .ok_or_else(|| DeserializeError::custom("Field 'version' missing"))?).map_err(|e| {
- DeserializeError::custom(format!("Field 'version' did not have a valid format: {}", e))
- })?;
-
- match object.remove("meta") {
- Some(m) => {
- let meta = json::from_value(m).map_err(|e| {
- DeserializeError::custom(format!("Bad meta-meta format: {}", e))
- })?;
-
- Ok(SnapshotMetadata {
- expires: expires,
- version: version,
- meta: meta,
- })
- }
- _ => Err(DeserializeError::custom("Signature missing fields".to_string())),
- }
- } else {
- Err(DeserializeError::custom("Role was not an object"))
- }
- }
-}
-
-/// A cryptographic signature.
-#[derive(Clone, PartialEq, Debug)]
-pub struct Signature {
- pub key_id: KeyId,
- pub method: SignatureScheme,
- pub sig: SignatureValue,
-}
-
-impl<'de> Deserialize<'de> for Signature {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- match (object.remove("keyid"), object.remove("method"), object.remove("sig")) {
- (Some(k), Some(m), Some(s)) => {
- let key_id =
- json::from_value(k).map_err(|e| {
- DeserializeError::custom(format!("Failed at keyid: {}", e))
- })?;
- let method =
- json::from_value(m).map_err(|e| {
- DeserializeError::custom(format!("Failed at method: {}", e))
- })?;
- let sig = json::from_value(s)
- .map_err(|e| DeserializeError::custom(format!("Failed at sig: {}", e)))?;
-
- Ok(Signature {
- key_id: key_id,
- method: method,
- sig: sig,
- })
- }
- _ => Err(DeserializeError::custom("Signature missing fields".to_string())),
- }
- } else {
- Err(DeserializeError::custom("Signature was not an object".to_string()))
- }
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::SnapshotMetadata = Deserialize::deserialize(de)?;
+ intermediate.try_into().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
}
}
-/// A public key
-#[derive(Clone, PartialEq, Debug, Deserialize)]
-pub struct Key {
- /// The type of keys.
- #[serde(rename = "keytype")]
- pub typ: KeyType,
- /// The key's value.
- #[serde(rename = "keyval")]
- pub value: KeyValue,
-}
+/// Wrapper for a path to a target.
+#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize)]
+pub struct TargetPath(String);
-impl Key {
- /// Use the given key to verify a signature over a byte array.
- pub fn verify(&self,
- scheme: &SignatureScheme,
- msg: &[u8],
- sig: &SignatureValue)
- -> Result<(), Error> {
- if self.typ.supports(scheme) {
- match self.typ {
- KeyType::Unsupported(ref s) => Err(Error::UnsupportedKeyType(s.clone())),
- _ => scheme.verify(&self.value, msg, sig),
- }
- } else {
- Err(Error::Generic(format!("Signature scheme mismatch: Key {:?}, Scheme {:?}",
- self,
- scheme)))
- }
+impl TargetPath {
+ /// Create a new `TargetPath` from a `String`.
+ ///
+ /// ```
+ /// use tuf::metadata::TargetPath;
+ ///
+ /// assert!(TargetPath::new("foo".into()).is_ok());
+ /// assert!(TargetPath::new("/foo".into()).is_err());
+ /// assert!(TargetPath::new("../foo".into()).is_err());
+ /// assert!(TargetPath::new("foo/".into()).is_err());
+ /// assert!(TargetPath::new("foo/..".into()).is_err());
+ /// assert!(TargetPath::new("foo/../bar".into()).is_err());
+ /// assert!(TargetPath::new("..foo".into()).is_ok());
+ /// assert!(TargetPath::new("foo//bar".into()).is_err());
+ /// assert!(TargetPath::new("foo/..bar".into()).is_ok());
+ /// assert!(TargetPath::new("foo/bar..".into()).is_ok());
+ /// ```
+ pub fn new(path: String) -> Result<Self> {
+ safe_path(&path)?;
+ Ok(TargetPath(path))
+ }
+
+ /// Split `TargetPath` into components that can be joined to create URL paths, Unix paths, or
+ /// Windows paths.
+ ///
+ /// ```
+ /// use tuf::metadata::TargetPath;
+ ///
+ /// let path = TargetPath::new("foo/bar".into()).unwrap();
+ /// assert_eq!(path.components(), ["foo".to_string(), "bar".to_string()]);
+ /// ```
+ pub fn components(&self) -> Vec<String> {
+ self.0.split('/').map(|s| s.to_string()).collect()
}
}
-/// Types of public keys.
-#[derive(Clone, PartialEq, Debug)]
-pub enum KeyType {
- /// [Ed25519](https://en.wikipedia.org/wiki/EdDSA#Ed25519) signature scheme.
- Ed25519,
- /// [RSA](https://en.wikipedia.org/wiki/RSA_%28cryptosystem%29)
- Rsa,
- /// Internal representation of an unsupported key type.
- Unsupported(String),
-}
-
-impl KeyType {
- fn supports(&self, scheme: &SignatureScheme) -> bool {
- match (self, scheme) {
- (&KeyType::Ed25519, &SignatureScheme::Ed25519) => true,
- (&KeyType::Rsa, &SignatureScheme::RsaSsaPssSha256) => true,
- (&KeyType::Rsa, &SignatureScheme::RsaSsaPssSha512) => true,
- _ => false,
- }
+impl ToString for TargetPath {
+ fn to_string(&self) -> String {
+ self.0.clone()
}
}
-impl FromStr for KeyType {
- type Err = Error;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s {
- "ed25519" => Ok(KeyType::Ed25519),
- "rsa" => Ok(KeyType::Rsa),
- typ => Ok(KeyType::Unsupported(typ.into())),
- }
+impl<'de> Deserialize<'de> for TargetPath {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let s: String = Deserialize::deserialize(de)?;
+ TargetPath::new(s).map_err(|e| DeserializeError::custom(format!("{:?}", e)))
}
}
-impl<'de> Deserialize<'de> for KeyType {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::String(ref s) = Deserialize::deserialize(de)? {
- s.parse().map_err(|_| unreachable!())
- } else {
- Err(DeserializeError::custom("Key type was not a string"))
- }
- }
+/// Description of a target, used in verification.
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct TargetDescription {
+ length: u64,
+ hashes: HashMap<HashAlgorithm, HashValue>,
}
+impl TargetDescription {
+ /// Read from the given reader and calculate the length and hash values.
+ ///
+ /// ```
+ /// extern crate data_encoding;
+ /// extern crate tuf;
+ /// use data_encoding::BASE64URL;
+ /// use tuf::crypto::{HashAlgorithm,HashValue};
+ /// use tuf::metadata::TargetDescription;
+ ///
+ /// fn main() {
+ /// let bytes: &[u8] = b"it was a pleasure to burn";
+ /// let target_description = TargetDescription::from_reader(bytes).unwrap();
+ ///
+ /// // $ printf 'it was a pleasure to burn' | sha256sum
+ /// let s = "Rd9zlbzrdWfeL7gnIEi05X-Yv2TCpy4qqZM1N72ZWQs=";
+ /// let sha256 = HashValue::new(BASE64URL.decode(s.as_bytes()).unwrap());
+ ///
+ /// // $ printf 'it was a pleasure to burn' | sha512sum
+ /// let s = "tuIxwKybYdvJpWuUj6dubvpwhkAozWB6hMJIRzqn2jOUdtDTBg381brV4K\
+ /// BU1zKP8GShoJuXEtCf5NkDTCEJgQ==";
+ /// let sha512 = HashValue::new(BASE64URL.decode(s.as_bytes()).unwrap());
+ ///
+ /// assert_eq!(target_description.length(), bytes.len() as u64);
+ /// assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha256), Some(&sha256));
+ /// assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha512), Some(&sha512));
+ /// }
+ /// ```
+ pub fn from_reader<R>(mut read: R) -> Result<Self>
+ where
+ R: Read,
+ {
+ let mut length = 0;
+ let mut sha256 = digest::Context::new(&SHA256);
+ let mut sha512 = digest::Context::new(&SHA512);
-/// The raw bytes of a public key.
-#[derive(Clone, PartialEq, Debug)]
-pub struct KeyValue {
- /// The key's raw bytes.
- pub value: Vec<u8>,
- /// The key's original value, needed for ID calculation
- pub original: String,
- /// The key's type,
- pub typ: KeyType,
-}
-
-impl KeyValue {
- /// Calculates the `KeyId` of the public key.
- pub fn key_id(&self) -> KeyId {
- match self.typ {
- KeyType::Unsupported(_) => KeyId(String::from("error")), // TODO this feels wrong, but we check this everywhere else
- _ => {
- let key_value = canonicalize(&json::Value::String(self.original.clone())).unwrap(); // TODO unwrap
- KeyId(HEXLOWER.encode(digest(&SHA256, &key_value).as_ref()))
- }
- }
- }
-}
-
-impl<'de> Deserialize<'de> for KeyValue {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- match Deserialize::deserialize(de)? {
- json::Value::String(ref s) => {
- // TODO this is pretty shaky
- if s.starts_with("-----") {
- pem::parse(s)
- .map(|p| {
- KeyValue {
- value: p.contents,
- original: s.clone(),
- typ: KeyType::Rsa,
- }
- })
- .map_err(|e| {
- DeserializeError::custom(format!("Key was not PEM encoded: {}", e))
- })
- } else {
- HEXLOWER.decode(s.as_ref())
- .map(|v| {
- KeyValue {
- value: v,
- original: s.clone(),
- typ: KeyType::Ed25519,
- }
- })
- .map_err(|e| {
- DeserializeError::custom(format!("Key value was not hex: {}", e))
- })
- }
- }
- json::Value::Object(mut object) => {
- json::from_value::<KeyValue>(object.remove("public")
- .ok_or_else(|| DeserializeError::custom("Field 'public' missing"))?)
- .map_err(|e| {
- DeserializeError::custom(format!("Field 'public' not encoded correctly: \
- {}",
- e))
- })
- }
- _ => Err(DeserializeError::custom("Key value was not a string or object")),
- }
- }
-}
-
-
-/// The hex encoded ID of a public key.
-#[derive(Clone, Hash, Eq, PartialEq, Debug)]
-pub struct KeyId(pub String);
-
-impl<'de> Deserialize<'de> for KeyId {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- match Deserialize::deserialize(de)? {
- json::Value::String(s) => Ok(KeyId(s)),
- _ => Err(DeserializeError::custom("Key ID was not a string")),
- }
- }
-}
-
-
-#[derive(Clone, PartialEq, Debug)]
-pub struct SignatureValue(Vec<u8>);
-
-impl<'de> Deserialize<'de> for SignatureValue {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- match Deserialize::deserialize(de)? {
- json::Value::String(ref s) => {
- HEXLOWER.decode(s.as_ref())
- .map(SignatureValue)
- .map_err(|e| {
- DeserializeError::custom(format!("Signature value was not hex: {}", e))
- })
- }
- _ => Err(DeserializeError::custom("Signature value was not a string")),
- }
- }
-}
-
-
-#[derive(Clone, PartialEq, Debug)]
-pub enum SignatureScheme {
- Ed25519,
- RsaSsaPssSha256,
- RsaSsaPssSha512,
- Unsupported(String),
-}
-
-impl SignatureScheme {
- fn verify(&self, pub_key: &KeyValue, msg: &[u8], sig: &SignatureValue) -> Result<(), Error> {
- let alg: &ring::signature::VerificationAlgorithm = match self {
- &SignatureScheme::Ed25519 => &ED25519,
- &SignatureScheme::RsaSsaPssSha256 => &RSA_PSS_2048_8192_SHA256,
- &SignatureScheme::RsaSsaPssSha512 => &RSA_PSS_2048_8192_SHA512,
- &SignatureScheme::Unsupported(ref s) => {
- return Err(Error::UnsupportedSignatureScheme(s.clone()));
- }
- };
-
- ring::signature::verify(alg, Input::from(&convert_to_pkcs1(&pub_key.value)),
- Input::from(msg), Input::from(&sig.0))
- .map_err(|_| Error::VerificationFailure("Bad signature".into()))
- }
-}
-
-impl FromStr for SignatureScheme {
- type Err = Error;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s {
- "ed25519" => Ok(SignatureScheme::Ed25519),
- "rsassa-pss-sha256" => Ok(SignatureScheme::RsaSsaPssSha256),
- "rsassa-pss-sha512" => Ok(SignatureScheme::RsaSsaPssSha512),
- typ => Ok(SignatureScheme::Unsupported(typ.into())),
- }
- }
-}
-
-impl<'de> Deserialize<'de> for SignatureScheme {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::String(ref s) = Deserialize::deserialize(de)? {
- s.parse().map_err(|_| unreachable!())
- } else {
- Err(DeserializeError::custom("Key type was not a string"))
- }
- }
-}
-
-
-#[derive(Clone, PartialEq, Debug, Deserialize)]
-pub struct MetadataMetadata {
- pub length: i64,
- pub hashes: HashMap<HashType, HashValue>,
- pub version: i32,
-}
-
-
-#[derive(Clone, PartialEq, Debug, Deserialize)]
-pub struct SnapshotMetadataMetadata {
- pub length: Option<i64>,
- pub hashes: Option<HashMap<HashType, HashValue>>,
- pub version: i32,
-}
-
-
-#[derive(Clone, Hash, Eq, PartialEq, Debug)]
-pub enum HashType {
- Sha256,
- Sha512,
- Unsupported(String),
-}
-
-impl HashType {
- pub fn preferences() -> &'static [HashType] {
- HASH_PREFERENCES
- }
-}
-
-impl FromStr for HashType {
- type Err = Error;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s {
- "sha256" => Ok(HashType::Sha256),
- "sha512" => Ok(HashType::Sha512),
- typ => Ok(HashType::Unsupported(typ.into())),
- }
- }
-}
-
-impl<'de> Deserialize<'de> for HashType {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::String(ref s) = Deserialize::deserialize(de)? {
- s.parse().map_err(|_| unreachable!())
- } else {
- Err(DeserializeError::custom("Hash type was not a string"))
- }
- }
-}
-
-
-#[derive(Clone, PartialEq, Debug)]
-pub struct HashValue(pub Vec<u8>);
-impl<'de> Deserialize<'de> for HashValue {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- match Deserialize::deserialize(de)? {
- json::Value::String(ref s) => {
- HEXLOWER.decode(s.as_ref())
- .map(HashValue)
- .map_err(|e| DeserializeError::custom(format!("Hash value was not hex: {}", e)))
- }
- _ => Err(DeserializeError::custom("Hash value was not a string")),
- }
- }
-}
-
-#[derive(Clone, Debug, Deserialize)]
-pub struct TargetInfo {
- pub length: i64,
- pub hashes: HashMap<HashType, HashValue>,
- pub custom: Option<HashMap<String, json::Value>>,
-}
-
-
-#[derive(Clone, PartialEq, Debug, Deserialize)]
-pub struct Delegations {
- pub keys: HashMap<KeyId, Key>,
- pub roles: Vec<DelegatedRole>,
-}
-
-
-#[derive(Clone, PartialEq, Debug)]
-pub struct DelegatedRole {
- pub name: String,
- pub key_ids: Vec<KeyId>,
- pub threshold: i32,
- pub terminating: bool,
- paths: TargetPaths,
-}
-
-impl DelegatedRole {
- pub fn could_have_target(&self, target: &str) -> bool {
- match self.paths {
- TargetPaths::Patterns(ref patterns) => {
- for path in patterns.iter() {
- let path_str = path.as_str();
- if path_str == target {
- return true
- } else if path_str.ends_with("/") && target.starts_with(path_str) {
- return true
+ let mut buf = vec![0; 1024];
+ loop {
+ match read.read(&mut buf) {
+ Ok(read_bytes) => {
+ if read_bytes == 0 {
+ break;
}
+
+ length += read_bytes as u64;
+ sha256.update(&buf[0..read_bytes]);
+ sha512.update(&buf[0..read_bytes]);
}
- return false
+ e @ Err(_) => e.map(|_| ())?,
}
}
+
+ let mut hashes = HashMap::new();
+ let _ = hashes.insert(
+ HashAlgorithm::Sha256,
+ HashValue::new(sha256.finish().as_ref().to_vec()),
+ );
+ let _ = hashes.insert(
+ HashAlgorithm::Sha512,
+ HashValue::new(sha512.finish().as_ref().to_vec()),
+ );
+ Ok(TargetDescription {
+ length: length,
+ hashes: hashes,
+ })
+ }
+
+ /// The maximum length of the target.
+ pub fn length(&self) -> u64 {
+ self.length
+ }
+
+ /// An immutable reference to the calculated hashes.
+ pub fn hashes(&self) -> &HashMap<HashAlgorithm, HashValue> {
+ &self.hashes
}
}
-impl<'de> Deserialize<'de> for DelegatedRole {
- fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
- if let json::Value::Object(mut object) = Deserialize::deserialize(de)? {
- match (object.remove("name"), object.remove("keyids"),
- object.remove("threshold"), object.remove("terminating"),
- object.remove("paths"), object.remove("path_hash_prefixes")) {
- (Some(n), Some(ks), Some(t), Some(term), Some(ps), None) => {
- let name =
- json::from_value(n).map_err(|e| {
- DeserializeError::custom(format!("Failed at name: {}", e))
- })?;
+/// Metadata for the targets role.
+#[derive(Debug, PartialEq)]
+pub struct TargetsMetadata {
+ version: u32,
+ expires: DateTime<Utc>,
+ targets: HashMap<TargetPath, TargetDescription>,
+}
- let key_ids =
- json::from_value(ks).map_err(|e| {
- DeserializeError::custom(format!("Failed at keyids: {}", e))
- })?;
-
- let threshold =
- json::from_value(t).map_err(|e| {
- DeserializeError::custom(format!("Failed at treshold: {}", e))
- })?;
-
- let terminating =
- json::from_value(term).map_err(|e| {
- DeserializeError::custom(format!("Failed at treshold: {}", e))
- })?;
-
- let paths: Vec<String> =
- json::from_value(ps).map_err(|e| {
- DeserializeError::custom(format!("Failed at treshold: {}", e))
- })?;
-
- Ok(DelegatedRole {
- name: name,
- key_ids: key_ids,
- threshold: threshold,
- terminating: terminating,
- paths: TargetPaths::Patterns(paths),
- })
- }
- (_, _, _, _, Some(_), Some(_)) =>
- Err(DeserializeError::custom("Fields 'paths' or 'pash_hash_prefixes' are mutually exclusive".to_string())),
- (_, _, _, _, _, Some(_)) =>
- Err(DeserializeError::custom("'pash_hash_prefixes' is not yet supported".to_string())),
- _ => Err(DeserializeError::custom("Signature missing fields".to_string())),
- }
- } else {
- Err(DeserializeError::custom("Delegated role was not an object".to_string()))
+impl TargetsMetadata {
+ /// Create new `TargetsMetadata`.
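+ ///
+ /// A minimal construction sketch; the target contents and expiry date are
+ /// illustrative, not canonical:
+ ///
+ /// ```
+ /// extern crate chrono;
+ /// extern crate tuf;
+ /// use chrono::prelude::*;
+ /// use std::collections::HashMap;
+ /// use tuf::metadata::{TargetDescription, TargetPath, TargetsMetadata};
+ ///
+ /// fn main() {
+ /// let description = TargetDescription::from_reader(b"foo" as &[u8]).unwrap();
+ /// let mut targets = HashMap::new();
+ /// let _ = targets.insert(TargetPath::new("foo".into()).unwrap(), description);
+ ///
+ /// // `new` rejects a version of zero.
+ /// let targets = TargetsMetadata::new(1, Utc.ymd(2017, 1, 1).and_hms(0, 0, 0), targets)
+ /// .unwrap();
+ /// assert_eq!(targets.version(), 1);
+ /// }
+ /// ```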
+ pub fn new(
+ version: u32,
+ expires: DateTime<Utc>,
+ targets: HashMap<TargetPath, TargetDescription>,
+ ) -> Result<Self> {
+ if version < 1 {
+ return Err(Error::IllegalArgument(format!(
+ "Metadata version must be greater than zero. Found: {}",
+ version
+ )));
}
+
+ Ok(TargetsMetadata {
+ version: version,
+ expires: expires,
+ targets: targets,
+ })
+ }
+
+ /// The version number.
+ pub fn version(&self) -> u32 {
+ self.version
+ }
+
+ /// An immutable reference to the metadata's expiration `DateTime`.
+ pub fn expires(&self) -> &DateTime<Utc> {
+ &self.expires
+ }
+
+ /// An immutable reference to the descriptions of targets.
+ pub fn targets(&self) -> &HashMap<TargetPath, TargetDescription> {
+ &self.targets
}
}
+impl Metadata for TargetsMetadata {
+ fn role() -> Role {
+ Role::Targets
+ }
+}
-#[derive(Clone, PartialEq, Debug)]
-pub enum TargetPaths {
- Patterns(Vec<String>),
- // TODO HashPrefixes(Vec<String>),
+impl Serialize for TargetsMetadata {
+ fn serialize<S>(&self, ser: S) -> ::std::result::Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ shims::TargetsMetadata::from(self)
+ .map_err(|e| SerializeError::custom(format!("{:?}", e)))?
+ .serialize(ser)
+ }
+}
+
+impl<'de> Deserialize<'de> for TargetsMetadata {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let intermediate: shims::TargetsMetadata = Deserialize::deserialize(de)?;
+ intermediate.try_into().map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
+ }
}
#[cfg(test)]
mod test {
use super::*;
+ use chrono::prelude::*;
+ use json;
+ use interchange::JsonDataInterchange;
+
+ const ED25519_1_PK8: &'static [u8] = include_bytes!("../tests/ed25519/ed25519-1.pk8.der");
+ const ED25519_2_PK8: &'static [u8] = include_bytes!("../tests/ed25519/ed25519-2.pk8.der");
+ const ED25519_3_PK8: &'static [u8] = include_bytes!("../tests/ed25519/ed25519-3.pk8.der");
+ const ED25519_4_PK8: &'static [u8] = include_bytes!("../tests/ed25519/ed25519-4.pk8.der");
#[test]
- fn delegated_role_could_have_target() {
- let vectors = vec![
- ("foo", "foo", true),
- ("foo/", "foo/bar", true),
- ("foo", "foo/bar", false),
- ("foo/bar", "foo/baz", false),
- ("foo/bar/", "foo/bar/baz", true),
+ fn serde_target_path() {
+ let s = "foo/bar";
+ let t = json::from_str::<TargetPath>(&format!("\"{}\"", s)).unwrap();
+ assert_eq!(t.to_string().as_str(), s);
+ assert_eq!(json::to_value(t).unwrap(), json!("foo/bar"));
+ }
+
+ #[test]
+ fn serde_metadata_path() {
+ let s = "foo/bar";
+ let m = json::from_str::<MetadataPath>(&format!("\"{}\"", s)).unwrap();
+ assert_eq!(m.to_string().as_str(), s);
+ assert_eq!(json::to_value(m).unwrap(), json!("foo/bar"));
+ }
+
+ #[test]
+ fn serde_target_description() {
+ let s: &[u8] = b"from water does all life begin";
+ let description = TargetDescription::from_reader(s).unwrap();
+ let jsn_str = json::to_string(&description).unwrap();
+ let jsn = json!({
+ "length": 30,
+ "hashes": {
+ "sha256": "_F10XHEryG6poxJk2sDJVu61OFf2d-7QWCm7cQE8rhg=",
+ "sha512": "593J2T34bimKdKT5MmaSZ0tXvmj13EVdpTGK5p2E2R3ife-xxZ8Ql\
+ EHsezz8HeN1_Y0SJqvLfK2WKUZQc98R_A==",
+ },
+ });
+ let parsed_str: TargetDescription = json::from_str(&jsn_str).unwrap();
+ let parsed_jsn: TargetDescription = json::from_value(jsn).unwrap();
+ assert_eq!(parsed_str, parsed_jsn);
+ }
+
+ #[test]
+ fn serde_role_definition() {
+ let hashes = hashset!(
+ "diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=",
+ "ar9AgoRsmeEcf6Ponta_1TZu1ds5uXbDemBig30O7ck=",
+ ).iter()
+ .map(|k| KeyId::from_string(*k).unwrap())
+ .collect();
+ let role_def = RoleDefinition::new(2, hashes).unwrap();
+ let jsn = json!({
+ "threshold": 2,
+ "key_ids": [
+ // these need to be sorted for determinism
+ "ar9AgoRsmeEcf6Ponta_1TZu1ds5uXbDemBig30O7ck=",
+ "diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=",
+ ],
+ });
+ let encoded = json::to_value(&role_def).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: RoleDefinition = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, role_def);
+
+ let jsn = json!({
+ "threshold": 0,
+ "key_ids": [
+ "diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=",
+ ],
+ });
+ assert!(json::from_value::<RoleDefinition>(jsn).is_err());
+
+ let jsn = json!({
+ "threshold": -1,
+ "key_ids": [
+ "diNfThTFm0PI8R-Bq7NztUIvZbZiaC_weJBgcqaHlWw=",
+ ],
+ });
+ assert!(json::from_value::<RoleDefinition>(jsn).is_err());
+ }
+
+ #[test]
+ fn serde_root_metadata() {
+ let root_key = PrivateKey::from_pkcs8(ED25519_1_PK8).unwrap();
+ let snapshot_key = PrivateKey::from_pkcs8(ED25519_2_PK8).unwrap();
+ let targets_key = PrivateKey::from_pkcs8(ED25519_3_PK8).unwrap();
+ let timestamp_key = PrivateKey::from_pkcs8(ED25519_4_PK8).unwrap();
+
+ let keys = vec![
+ root_key.public().clone(),
+ snapshot_key.public().clone(),
+ targets_key.public().clone(),
+ timestamp_key.public().clone(),
];
- for &(prefix, target, success) in vectors.iter() {
- let delegation = DelegatedRole {
- name: "".to_string(),
- key_ids: Vec::new(),
- threshold: 1,
- terminating: false,
- paths: TargetPaths::Patterns(vec![prefix.to_string()]),
- };
+ let root_def = RoleDefinition::new(1, hashset!(root_key.key_id().clone())).unwrap();
+ let snapshot_def = RoleDefinition::new(1, hashset!(snapshot_key.key_id().clone())).unwrap();
+ let targets_def = RoleDefinition::new(1, hashset!(targets_key.key_id().clone())).unwrap();
+ let timestamp_def = RoleDefinition::new(1, hashset!(timestamp_key.key_id().clone()))
+ .unwrap();
- assert!(!success ^ delegation.could_have_target(target),
- format!("Prefix {} should have target {}: {}", prefix, target, success))
- };
+ let root = RootMetadata::new(
+ 1,
+ Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
+ false,
+ keys,
+ root_def,
+ snapshot_def,
+ targets_def,
+ timestamp_def,
+ ).unwrap();
+
+ let jsn = json!({
+ "type": "root",
+ "version": 1,
+ "expires": "2017-01-01T00:00:00Z",
+ "consistent_snapshot": false,
+ "keys": {
+ "qfrfBrkB4lBBSDEBlZgaTGS_SrE6UfmON9kP4i3dJFY=": {
+ "type": "ed25519",
+ "public_key": "MCwwBwYDK2VwBQADIQDrisJrXJ7wJ5474-giYqk7zhb-WO5CJQDTjK9GHGWjtg==",
+ },
+ "4hsyITLMQoWBg0ldCLKPlRZPIEf258cMg-xdAROsO6o=": {
+ "type": "ed25519",
+ "public_key": "MCwwBwYDK2VwBQADIQAWY3bJCn9xfQJwVicvNhwlL7BQvtGgZ_8giaAwL7q3PQ==",
+ },
+ "5WvZhiiSSUung_OhJVbPshKwD_ZNkgeg80i4oy2KAVs=": {
+ "type": "ed25519",
+ "public_key": "MCwwBwYDK2VwBQADIQBo2eyzhzcQBajrjmAQUwXDQ1ao_NhZ1_7zzCKL8rKzsg==",
+ },
+ "C2hNB7qN99EAbHVGHPIJc5Hqa9RfEilnMqsCNJ5dGdw=": {
+ "type": "ed25519",
+ "public_key": "MCwwBwYDK2VwBQADIQAUEK4wU6pwu_qYQoqHnWTTACo1ePffquscsHZOhg9-Cw==",
+ },
+ },
+ "roles": {
+ "root": {
+ "threshold": 1,
+ "key_ids": ["qfrfBrkB4lBBSDEBlZgaTGS_SrE6UfmON9kP4i3dJFY="],
+ },
+ "snapshot": {
+ "threshold": 1,
+ "key_ids": ["5WvZhiiSSUung_OhJVbPshKwD_ZNkgeg80i4oy2KAVs="],
+ },
+ "targets": {
+ "threshold": 1,
+ "key_ids": ["4hsyITLMQoWBg0ldCLKPlRZPIEf258cMg-xdAROsO6o="],
+ },
+ "timestamp": {
+ "threshold": 1,
+ "key_ids": ["C2hNB7qN99EAbHVGHPIJc5Hqa9RfEilnMqsCNJ5dGdw="],
+ },
+ },
+ });
+
+ let encoded = json::to_value(&root).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: RootMetadata = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, root);
+ }
+
+ #[test]
+ fn serde_timestamp_metadata() {
+ let timestamp = TimestampMetadata::new(
+ 1,
+ Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
+ hashmap!{
+ MetadataPath::new("foo".into()).unwrap() => MetadataDescription::new(1).unwrap(),
+ },
+ ).unwrap();
+
+ let jsn = json!({
+ "type": "timestamp",
+ "version": 1,
+ "expires": "2017-01-01T00:00:00Z",
+ "meta": {
+ "foo": {
+ "version": 1,
+ },
+ },
+ });
+
+ let encoded = json::to_value(×tamp).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: TimestampMetadata = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, timestamp);
+ }
+
+ #[test]
+ fn serde_snapshot_metadata() {
+ let snapshot = SnapshotMetadata::new(
+ 1,
+ Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
+ hashmap! {
+ MetadataPath::new("foo".into()).unwrap() => MetadataDescription::new(1).unwrap(),
+ },
+ ).unwrap();
+
+ let jsn = json!({
+ "type": "snapshot",
+ "version": 1,
+ "expires": "2017-01-01T00:00:00Z",
+ "meta": {
+ "foo": {
+ "version": 1,
+ },
+ },
+ });
+
+ let encoded = json::to_value(&snapshot).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: SnapshotMetadata = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, snapshot);
+ }
+
+ #[test]
+ fn serde_targets_metadata() {
+ let targets = TargetsMetadata::new(
+ 1,
+ Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
+ hashmap! {
+ TargetPath::new("foo".into()).unwrap() => TargetDescription::from_reader(b"foo" as &[u8]).unwrap(),
+ }
+ ).unwrap();
+
+ let jsn = json!({
+ "type": "targets",
+ "version": 1,
+ "expires": "2017-01-01T00:00:00Z",
+ "targets": {
+ "foo": {
+ "length": 3,
+ "hashes": {
+ "sha256": "LCa0a2j_xo_5m0U8HTBBNBNCLXBkg7-g-YpeiGJm564=",
+ "sha512": "9_u6bgY2-JDlb7vzKD5STG-jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==",
+ },
+ },
+ },
+ });
+
+ let encoded = json::to_value(&targets).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: TargetsMetadata = json::from_value(encoded).unwrap();
+ assert_eq!(decoded, targets);
+ }
+
+ #[test]
+ fn serde_signed_metadata() {
+ let snapshot = SnapshotMetadata::new(
+ 1,
+ Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
+ hashmap! {
+ MetadataPath::new("foo".into()).unwrap() => MetadataDescription::new(1).unwrap(),
+ },
+ ).unwrap();
+
+ let key = PrivateKey::from_pkcs8(ED25519_1_PK8).unwrap();
+
+ let signed = SignedMetadata::<JsonDataInterchange, SnapshotMetadata>::new(
+ &snapshot,
+ &key,
+ SignatureScheme::Ed25519,
+ ).unwrap();
+
+ let jsn = json!({
+ "signatures": [
+ {
+ "key_id": "qfrfBrkB4lBBSDEBlZgaTGS_SrE6UfmON9kP4i3dJFY=",
+ "scheme": "ed25519",
+ "value": "T2cUdVcGn08q9Cl4sKXqQni4J63TxZ48wR3jt583QuWXJ2AmxRHwEnWIHtkCOmzohF4D0v9JspeH6samO-H6CA==",
+ }
+ ],
+ "signed": {
+ "type": "snapshot",
+ "version": 1,
+ "expires": "2017-01-01T00:00:00Z",
+ "meta": {
+ "foo": {
+ "version": 1,
+ },
+ },
+ },
+ });
+
+ let encoded = json::to_value(&signed).unwrap();
+ assert_eq!(encoded, jsn);
+ let decoded: SignedMetadata<JsonDataInterchange, SnapshotMetadata> =
+ json::from_value(encoded).unwrap();
+ assert_eq!(decoded, signed);
}
}
diff --git a/src/repository.rs b/src/repository.rs
new file mode 100644
index 0000000..04f9d2c
--- /dev/null
+++ b/src/repository.rs
@@ -0,0 +1,594 @@
+//! Interfaces for interacting with different types of TUF repositories.
+
+use hyper::{Url, Client};
+use hyper::client::response::Response;
+use hyper::header::{Headers, UserAgent};
+use hyper::status::StatusCode;
+use ring::digest::{self, SHA256, SHA512};
+use std::collections::HashMap;
+use std::fs::{self, File, DirBuilder};
+use std::io::{Read, Write, Cursor};
+use std::marker::PhantomData;
+use std::path::PathBuf;
+use tempfile::NamedTempFile;
+
+use Result;
+use crypto::{self, HashAlgorithm, HashValue};
+use error::Error;
+use metadata::{SignedMetadata, MetadataVersion, Role, Metadata, TargetPath, TargetDescription,
+ MetadataPath};
+use interchange::DataInterchange;
+
+/// Top-level trait that represents a TUF repository and contains all the ways it can be interacted
+/// with.
+pub trait Repository<D>
+where
+ D: DataInterchange,
+{
+ /// The type returned when reading a target.
+ type TargetRead: Read;
+
+ /// Initialize the repository.
+ fn initialize(&mut self) -> Result<()>;
+
+ /// Store signed metadata.
+ fn store_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ metadata: &SignedMetadata<D, M>,
+ ) -> Result<()>
+ where
+ M: Metadata;
+
+ /// Fetch signed metadata.
+ fn fetch_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ max_size: &Option<usize>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
+ ) -> Result<SignedMetadata<D, M>>
+ where
+ M: Metadata;
+
+ /// Store the given target.
+ fn store_target<R>(
+ &mut self,
+ read: R,
+ target_path: &TargetPath,
+ target_description: &TargetDescription,
+ ) -> Result<()>
+ where
+ R: Read;
+
+ /// Fetch the given target.
+ ///
+ /// **WARNING**: The target will **NOT** yet be verified.
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead>;
+
+ /// Perform a sanity check that `M`, `Role`, and `MetadataPath` all describe the same entity.
+ fn check<M>(role: &Role, meta_path: &MetadataPath) -> Result<()>
+ where
+ M: Metadata,
+ {
+ if role != &M::role() {
+ return Err(Error::IllegalArgument(format!(
+ "Attempted to store {} metadata as {}.",
+ M::role(),
+ role
+ )));
+ }
+
+ if !role.fuzzy_matches_path(meta_path) {
+ return Err(Error::IllegalArgument(
+ format!("Role {} does not match path {:?}", role, meta_path),
+ ));
+ }
+
+ Ok(())
+ }
+
+ /// Read from the given reader, optionally capping the read at `max_size` bytes and
+ /// optionally requiring hashes to match.
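+ ///
+ /// A small sketch of the size cap (no hash checking; the 1024-byte limit is
+ /// arbitrary, and the module paths are assumed from this crate's layout):
+ ///
+ /// ```
+ /// extern crate tuf;
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::repository::{EphemeralRepository, Repository};
+ ///
+ /// fn main() {
+ /// let data: &[u8] = b"good data";
+ /// let mut out = Vec::new();
+ /// EphemeralRepository::<JsonDataInterchange>::safe_read(data, &mut out, Some(1024), None)
+ /// .unwrap();
+ /// assert_eq!(out.as_slice(), data);
+ ///
+ /// // Reads that exceed `max_size` are rejected.
+ /// let res = EphemeralRepository::<JsonDataInterchange>::safe_read(
+ /// data, &mut Vec::new(), Some(4), None);
+ /// assert!(res.is_err());
+ /// }
+ /// ```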
+ fn safe_read<R, W>(
+ mut read: R,
+ mut write: W,
+ max_size: Option<i64>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
+ ) -> Result<()>
+ where
+ R: Read,
+ W: Write,
+ {
+ let mut context = match hash_data {
+ Some((&HashAlgorithm::Sha256, _)) => Some(digest::Context::new(&SHA256)),
+ Some((&HashAlgorithm::Sha512, _)) => Some(digest::Context::new(&SHA512)),
+ None => None,
+ };
+
+ let mut buf = [0; 1024];
+ let mut bytes_left = max_size.unwrap_or(::std::i64::MAX);
+
+ loop {
+ match read.read(&mut buf) {
+ Ok(read_bytes) => {
+ if read_bytes == 0 {
+ break;
+ }
+
+ bytes_left -= read_bytes as i64;
+ if bytes_left < 0 {
+ return Err(Error::VerificationFailure(
+ "Read exceeded the maximum allowed bytes.".into(),
+ ));
+ }
+
+ write.write_all(&buf[0..read_bytes])?;
+
+ match context {
+ Some(ref mut c) => c.update(&buf[0..read_bytes]),
+ None => (),
+ };
+ }
+ e @ Err(_) => e.map(|_| ())?,
+ }
+ }
+
+ let generated_hash = context.map(|c| c.finish());
+
+ match (generated_hash, hash_data) {
+ (Some(generated_hash), Some((_, expected_hash)))
+ if generated_hash.as_ref() != expected_hash.value() => {
+ Err(Error::VerificationFailure(
+ "Generated hash did not match expected hash.".into(),
+ ))
+ }
+ (Some(_), None) => {
+ let msg = "Hash calculated when no expected hash supplied. \
+ This is a programming error. Please report this as a bug.";
+ error!("{}", msg);
+ Err(Error::Programming(msg.into()))
+ }
+ (None, Some(_)) => {
+ let msg = "No hash calculated when expected hash supplied. \
+ This is a programming error. Please report this as a bug.";
+ error!("{}", msg);
+ Err(Error::Programming(msg.into()))
+ }
+ (Some(_), Some(_)) |
+ (None, None) => Ok(()),
+ }
+ }
+}
+
+/// A repository contained on the local file system.
+pub struct FileSystemRepository<D>
+where
+ D: DataInterchange,
+{
+ local_path: PathBuf,
+ _interchange: PhantomData<D>,
+}
+
+impl<D> FileSystemRepository<D>
+where
+ D: DataInterchange,
+{
+ /// Create a new repository on the local file system.
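+ ///
+ /// A short sketch using `tempdir` (a dev-dependency of this crate) so nothing is
+ /// left on disk:
+ ///
+ /// ```
+ /// extern crate tempdir;
+ /// extern crate tuf;
+ /// use tempdir::TempDir;
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::repository::{FileSystemRepository, Repository};
+ ///
+ /// fn main() {
+ /// let temp_dir = TempDir::new("rust-tuf").unwrap();
+ /// let mut repo =
+ /// FileSystemRepository::<JsonDataInterchange>::new(temp_dir.path().to_path_buf());
+ /// // Creates the `metadata`, `targets`, and `temp` subdirectories.
+ /// repo.initialize().unwrap();
+ /// }
+ /// ```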
+ pub fn new(local_path: PathBuf) -> Self {
+ FileSystemRepository {
+ local_path: local_path,
+ _interchange: PhantomData,
+ }
+ }
+}
+
+impl<D> Repository<D> for FileSystemRepository<D>
+where
+ D: DataInterchange,
+{
+ type TargetRead = File;
+
+ fn initialize(&mut self) -> Result<()> {
+ for p in &["metadata", "targets", "temp"] {
+ DirBuilder::new().recursive(true).create(
+ self.local_path.join(p),
+ )?
+ }
+
+ Ok(())
+ }
+
+ fn store_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ metadata: &SignedMetadata<D, M>,
+ ) -> Result<()>
+ where
+ M: Metadata,
+ {
+ Self::check::<M>(role, meta_path)?;
+
+ let mut path = self.local_path.join("metadata");
+ path.extend(meta_path.components::<D>(version));
+
+ if path.exists() {
+ debug!("Metadata path exists. Deleting: {:?}", path);
+ fs::remove_file(&path)?
+ }
+
+ let mut file = File::create(&path)?;
+ D::to_writer(&mut file, metadata)?;
+ Ok(())
+
+ }
+
+ /// Fetch signed metadata.
+ fn fetch_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ max_size: &Option<usize>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
+ ) -> Result<SignedMetadata<D, M>>
+ where
+ M: Metadata,
+ {
+ Self::check::<M>(role, meta_path)?;
+
+ let mut path = self.local_path.join("metadata");
+ path.extend(meta_path.components::<D>(&version));
+
+ let mut file = File::open(&path)?;
+ let mut out = Vec::new();
+ Self::safe_read(&mut file, &mut out, max_size.map(|x| x as i64), hash_data)?;
+
+ Ok(D::from_reader(&*out)?)
+ }
+
+ fn store_target<R>(
+ &mut self,
+ read: R,
+ target_path: &TargetPath,
+ target_description: &TargetDescription,
+ ) -> Result<()>
+ where
+ R: Read,
+ {
+ let mut temp_file = NamedTempFile::new_in(self.local_path.join("temp"))?;
+ let hash_data = crypto::hash_preference(target_description.hashes())?;
+ Self::safe_read(
+ read,
+ &mut temp_file,
+ Some(target_description.length() as i64),
+ Some(hash_data),
+ )?;
+
+ let mut path = self.local_path.clone().join("targets");
+ path.extend(target_path.components());
+ temp_file.persist(&path)?;
+
+ Ok(())
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<File> {
+ let mut path = self.local_path.join("targets");
+ path.extend(target_path.components());
+
+ if !path.exists() {
+ return Err(Error::NotFound);
+ }
+
+ Ok(File::open(&path)?)
+ }
+}
+
+
+/// A repository accessible over HTTP.
+pub struct HttpRepository<D>
+where
+ D: DataInterchange,
+{
+ url: Url,
+ client: Client,
+ user_agent: String,
+ _interchange: PhantomData<D>,
+}
+
+impl<D> HttpRepository<D>
+where
+ D: DataInterchange,
+{
+ /// Create a new repository with the given `Url` and `Client`. Callers *should* include a
+ /// custom User-Agent prefix to help maintainers of TUF repositories keep track of which
+ /// client versions exist in the field.
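+ ///
+ /// A construction sketch; the URL and User-Agent prefix below are placeholders:
+ ///
+ /// ```
+ /// extern crate hyper;
+ /// extern crate tuf;
+ /// use hyper::{Client, Url};
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::repository::HttpRepository;
+ ///
+ /// fn main() {
+ /// let url = Url::parse("https://example.com/").unwrap();
+ /// let _repo = HttpRepository::<JsonDataInterchange>::new(
+ /// url, Client::new(), Some("example-app".into()));
+ /// }
+ /// ```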
+ pub fn new(url: Url, client: Client, user_agent_prefix: Option<String>) -> Self {
+ let user_agent = match user_agent_prefix {
+ Some(ua) => format!("{} (rust-tuf/{})", ua, env!("CARGO_PKG_VERSION")),
+ None => format!("rust-tuf/{}", env!("CARGO_PKG_VERSION")),
+ };
+
+ HttpRepository {
+ url: url,
+ client: client,
+ user_agent: user_agent,
+ _interchange: PhantomData,
+ }
+ }
+
+ fn get(&self, components: &[String]) -> Result<Response> {
+ let mut headers = Headers::new();
+ headers.set(UserAgent(self.user_agent.clone()));
+
+ let mut url = self.url.clone();
+ url.path_segments_mut()
+ .map_err(|_| {
+ Error::IllegalArgument(format!("URL was 'cannot-be-a-base': {:?}", self.url))
+ })?
+ .extend(components);
+
+ let req = self.client.get(url.clone()).headers(headers);
+ let resp = req.send()?;
+
+ if !resp.status.is_success() {
+ if resp.status == StatusCode::NotFound {
+ Err(Error::NotFound)
+ } else {
+ Err(Error::Opaque(
+ format!("Error getting {:?}: {:?}", url, resp),
+ ))
+ }
+ } else {
+ Ok(resp)
+ }
+ }
+}
+
+impl<D> Repository<D> for HttpRepository<D>
+where
+ D: DataInterchange,
+{
+ type TargetRead = Response;
+
+ fn initialize(&mut self) -> Result<()> {
+ Ok(())
+ }
+
+ /// This always returns `Err` as storing over HTTP is not yet supported.
+ fn store_metadata<M>(
+ &mut self,
+ _: &Role,
+ _: &MetadataPath,
+ _: &MetadataVersion,
+ _: &SignedMetadata<D, M>,
+ ) -> Result<()>
+ where
+ M: Metadata,
+ {
+ Err(Error::Opaque(
+ "Http repo store metadata not implemented".to_string(),
+ ))
+ }
+
+ fn fetch_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ max_size: &Option<usize>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
+ ) -> Result<SignedMetadata<D, M>>
+ where
+ M: Metadata,
+ {
+ Self::check::<M>(role, meta_path)?;
+
+ let mut resp = self.get(&meta_path.components::<D>(&version))?;
+ let mut out = Vec::new();
+ Self::safe_read(&mut resp, &mut out, max_size.map(|x| x as i64), hash_data)?;
+ Ok(D::from_reader(&*out)?)
+ }
+
+ /// This always returns `Err` as storing over HTTP is not yet supported.
+ fn store_target<R>(&mut self, _: R, _: &TargetPath, _: &TargetDescription) -> Result<()>
+ where
+ R: Read,
+ {
+ Err(Error::Opaque(
+ "Http repo store not implemented".to_string(),
+ ))
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead> {
+ let resp = self.get(&target_path.components())?;
+ Ok(resp)
+ }
+}
+
+
+/// An ephemeral repository contained solely in memory.
+pub struct EphemeralRepository<D>
+where
+ D: DataInterchange,
+{
+ metadata: HashMap<(MetadataPath, MetadataVersion), Vec<u8>>,
+ targets: HashMap<TargetPath, Vec<u8>>,
+ _interchange: PhantomData<D>,
+}
+
+impl<D> EphemeralRepository<D>
+where
+ D: DataInterchange,
+{
+ /// Create a new ephemeral repository.
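+ ///
+ /// A store/fetch round-trip sketch (the target data is arbitrary):
+ ///
+ /// ```
+ /// extern crate tuf;
+ /// use std::io::Read;
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::metadata::{TargetDescription, TargetPath};
+ /// use tuf::repository::{EphemeralRepository, Repository};
+ ///
+ /// fn main() {
+ /// let mut repo = EphemeralRepository::<JsonDataInterchange>::new();
+ /// repo.initialize().unwrap();
+ ///
+ /// let data: &[u8] = b"like tears in the rain";
+ /// let description = TargetDescription::from_reader(data).unwrap();
+ /// let path = TargetPath::new("batty".into()).unwrap();
+ /// repo.store_target(data, &path, &description).unwrap();
+ ///
+ /// let mut read = repo.fetch_target(&path).unwrap();
+ /// let mut buf = Vec::new();
+ /// read.read_to_end(&mut buf).unwrap();
+ /// assert_eq!(buf.as_slice(), data);
+ /// }
+ /// ```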
+ pub fn new() -> Self {
+ EphemeralRepository {
+ metadata: HashMap::new(),
+ targets: HashMap::new(),
+ _interchange: PhantomData,
+ }
+ }
+}
+
+impl<D> Repository<D> for EphemeralRepository<D>
+where
+ D: DataInterchange,
+{
+ type TargetRead = Cursor<Vec<u8>>;
+
+ fn initialize(&mut self) -> Result<()> {
+ Ok(())
+ }
+
+ fn store_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ metadata: &SignedMetadata<D, M>,
+ ) -> Result<()>
+ where
+ M: Metadata,
+ {
+ Self::check::<M>(role, meta_path)?;
+ let mut buf = Vec::new();
+ D::to_writer(&mut buf, metadata)?;
+ let _ = self.metadata.insert(
+ (meta_path.clone(), version.clone()),
+ buf,
+ );
+ Ok(())
+ }
+
+ fn fetch_metadata<M>(
+ &mut self,
+ role: &Role,
+ meta_path: &MetadataPath,
+ version: &MetadataVersion,
+ max_size: &Option<usize>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
+ ) -> Result<SignedMetadata<D, M>>
+ where
+ M: Metadata,
+ {
+ Self::check::<M>(role, meta_path)?;
+
+ match self.metadata.get(&(meta_path.clone(), version.clone())) {
+ Some(bytes) => {
+ let mut buf = Vec::new();
+ Self::safe_read(
+ bytes.as_slice(),
+ &mut buf,
+ max_size.map(|x| x as i64),
+ hash_data,
+ )?;
+ D::from_reader(&*buf)
+ }
+ None => Err(Error::NotFound),
+ }
+ }
+
+ fn store_target<R>(
+ &mut self,
+ read: R,
+ target_path: &TargetPath,
+ target_description: &TargetDescription,
+ ) -> Result<()>
+ where
+ R: Read,
+ {
+ let mut buf = Vec::new();
+ let hash_data = crypto::hash_preference(target_description.hashes())?;
+ Self::safe_read(
+ read,
+ &mut buf,
+ Some(target_description.length() as i64),
+ Some(hash_data),
+ )?;
+ let _ = self.targets.insert(target_path.clone(), buf);
+ Ok(())
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead> {
+ match self.targets.get(target_path) {
+ Some(bytes) => Ok(Cursor::new(bytes.clone())),
+ None => Err(Error::NotFound),
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use tempdir::TempDir;
+ use interchange::JsonDataInterchange;
+
+ #[test]
+ fn ephemeral_repo_targets() {
+ let mut repo = EphemeralRepository::<JsonDataInterchange>::new();
+ repo.initialize().expect("initialize repo");
+
+ let data: &[u8] = b"like tears in the rain";
+ let target_description =
+ TargetDescription::from_reader(data).expect("generate target description");
+ let path = TargetPath::new("batty".into()).expect("make target path");
+ repo.store_target(data, &path, &target_description).expect(
+ "store target",
+ );
+
+ let mut read = repo.fetch_target(&path).expect("fetch target");
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).expect("read target");
+ assert_eq!(buf.as_slice(), data);
+
+ let bad_data: &[u8] = b"you're in a desert";
+ assert!(
+ repo.store_target(bad_data, &path, &target_description)
+ .is_err()
+ );
+
+ let mut read = repo.fetch_target(&path).expect("fetch target");
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).expect("read target");
+ assert_eq!(buf.as_slice(), data);
+ }
+
+ #[test]
+ fn file_system_repo_targets() {
+ let temp_dir = TempDir::new("rust-tuf").expect("make temp dir");
+ let mut repo =
+ FileSystemRepository::<JsonDataInterchange>::new(temp_dir.path().to_path_buf());
+ repo.initialize().expect("initialize repo");
+
+ let data: &[u8] = b"like tears in the rain";
+ let target_description =
TargetDescription::from_reader(data).expect("generate target description");
+ let path = TargetPath::new("batty".into()).expect("make target path");
+ repo.store_target(data, &path, &target_description).expect(
+ "store target",
+ );
+ assert!(temp_dir.path().join("targets").join("batty").exists());
+
+ let mut read = repo.fetch_target(&path).expect("fetch target");
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).expect("read target");
+ assert_eq!(buf.as_slice(), data);
+
+ let bad_data: &[u8] = b"you're in a desert";
+ assert!(
+ repo.store_target(bad_data, &path, &target_description)
+ .is_err()
+ );
+
+ let mut read = repo.fetch_target(&path).expect("fetch target");
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).expect("read target");
+ assert_eq!(buf.as_slice(), data);
+ }
+}
diff --git a/src/rsa/der.rs b/src/rsa/der.rs
deleted file mode 100644
index 6b465a6..0000000
--- a/src/rsa/der.rs
+++ /dev/null
@@ -1,338 +0,0 @@
-// Copyright 2015 Brian Smith.
-//
-// Permission to use, copy, modify, and/or distribute this software for any
-// purpose with or without fee is hereby granted, provided that the above
-// copyright notice and this permission notice appear in all copies.
-//
-// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
-// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
-// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
-// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
-// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-//! Building blocks for parsing DER-encoded ASN.1 structures.
-//!
-//! This module contains the foundational parts of an ASN.1 DER parser.
-
-use ring;
-use std::io::{self, Write};
-use untrusted;
-
-pub const CONSTRUCTED: u8 = 1 << 5;
-
-#[derive(Clone, Copy, PartialEq, Debug)]
-#[repr(u8)]
-pub enum Tag {
- EOC = 0x00,
- Integer = 0x02,
- BitString = 0x03,
- Null = 0x05,
- OID = 0x06,
- Sequence = CONSTRUCTED | 0x10, // 0x30
-}
-
-pub fn expect_tag_and_get_value<'a>(input: &mut untrusted::Reader<'a>,
- tag: Tag)
- -> Result<untrusted::Input<'a>, ring::error::Unspecified> {
-
- let (actual_tag, inner) = read_tag_and_get_value(input)?;
- if (tag as usize) != (actual_tag as usize) {
- return Err(ring::error::Unspecified);
- }
- Ok(inner)
-}
-
-pub fn read_tag_and_get_value<'a>
- (input: &mut untrusted::Reader<'a>)
- -> Result<(u8, untrusted::Input<'a>), ring::error::Unspecified> {
- let tag = input.read_byte()?;
-
- if tag as usize == Tag::EOC as usize {
- return Ok((tag, untrusted::Input::from(&[])))
- }
-
- if (tag & 0x1F) == 0x1F {
- return Err(ring::error::Unspecified); // High tag number form is not allowed.
- }
-
- // If the high order bit of the first byte is set to zero then the length
- // is encoded in the seven remaining bits of that byte. Otherwise, those
- // seven bits represent the number of bytes used to encode the length.
- let length = match input.read_byte()? {
- n if (n & 0x80) == 0 => n as usize,
- 0x81 => {
- let second_byte = input.read_byte()?;
- if second_byte < 128 {
- return Err(ring::error::Unspecified); // Not the canonical encoding.
- }
- second_byte as usize
- }
- 0x82 => {
- let second_byte = input.read_byte()? as usize;
- let third_byte = input.read_byte()? as usize;
- let combined = (second_byte << 8) | third_byte;
-
- if combined < 256 {
- return Err(ring::error::Unspecified); // Not the canonical encoding.
- }
- combined
- }
- _ => {
- return Err(ring::error::Unspecified); // We don't support longer lengths.
- }
- };
-
-
- let inner = input.skip_and_get_input(length)?;
- Ok((tag, inner))
-}
-
-// TODO: investigate taking decoder as a reference to reduce generated code
-// size.
-pub fn nested<'a, F, R, E: Copy>(input: &mut untrusted::Reader<'a>,
- tag: Tag,
- error: E,
- decoder: F)
- -> Result<R, E>
- where F: FnOnce(&mut untrusted::Reader<'a>) -> Result<R, E>
-{
- let inner = expect_tag_and_get_value(input, tag).map_err(|_| error)?;
- inner.read_all(error, decoder)
-}
-
-fn nonnegative_integer<'a>(input: &mut untrusted::Reader<'a>,
- min_value: u8)
- -> Result<untrusted::Input<'a>, ring::error::Unspecified> {
- // Verify that |input|, which has had any leading zero stripped off, is the
- // encoding of a value of at least |min_value|.
- fn check_minimum(input: untrusted::Input,
- min_value: u8)
- -> Result<(), ring::error::Unspecified> {
- input.read_all(ring::error::Unspecified, |input| {
- let first_byte = input.read_byte()?;
- if input.at_end() && first_byte < min_value {
- return Err(ring::error::Unspecified);
- }
- let _ = input.skip_to_end();
- Ok(())
- })
- }
-
- let value = expect_tag_and_get_value(input, Tag::Integer)?;
-
- value.read_all(ring::error::Unspecified, |input| {
- // Empty encodings are not allowed.
- let first_byte = input.read_byte()?;
-
- if first_byte == 0 {
- if input.at_end() {
- // |value| is the legal encoding of zero.
- if min_value > 0 {
- return Err(ring::error::Unspecified);
- }
- return Ok(value);
- }
-
- let r = input.skip_to_end();
- r.read_all(ring::error::Unspecified, |input| {
- let second_byte = input.read_byte()?;
- if (second_byte & 0x80) == 0 {
- // A leading zero is only allowed when the value's high bit
- // is set.
- return Err(ring::error::Unspecified);
- }
- let _ = input.skip_to_end();
- Ok(())
- })?;
- check_minimum(r, min_value)?;
- return Ok(r);
- }
-
- // Negative values are not allowed.
- if (first_byte & 0x80) != 0 {
- return Err(ring::error::Unspecified);
- }
-
- let _ = input.skip_to_end();
- check_minimum(value, min_value)?;
- Ok(value)
- })
-}
-
-/// Parses a positive DER integer, returning the big-endian-encoded value, sans
-/// any leading zero byte.
-#[inline]
-pub fn positive_integer<'a>(input: &mut untrusted::Reader<'a>)
- -> Result<untrusted::Input<'a>, ring::error::Unspecified> {
- nonnegative_integer(input, 1)
-}
-
-
-pub struct Der<'a, W: Write + 'a> {
- writer: &'a mut W,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub struct Error;
-
-impl From<ring::error::Unspecified> for Error {
- fn from(_: ring::error::Unspecified) -> Error {
- Error
- }
-}
-
-impl From<io::Error> for Error {
- fn from(_: io::Error) -> Error {
- Error
- }
-}
-
-
-impl<'a, W: Write> Der<'a, W> {
- pub fn new(writer: &'a mut W) -> Self {
- Der { writer: writer }
- }
-
- fn length_of_length(len: usize) -> u8 {
- let mut i = len;
- let mut num_bytes = 1;
-
- while i > 255 {
- num_bytes += 1;
- i >>= 8;
- }
-
- num_bytes
- }
-
- fn write_len(&mut self, len: usize) -> Result<(), Error> {
- if len >= 128 {
- let n = Self::length_of_length(len);
- self.writer.write_all(&[0x80 | n])?;
-
- for i in (1..n + 1).rev() {
- self.writer.write_all(&[(len >> ((i - 1) * 8)) as u8])?;
- }
- } else {
- self.writer.write_all(&[len as u8])?;
- }
-
- Ok(())
- }
-
- pub fn write_integer(&mut self, input: untrusted::Input) -> Result<(), Error> {
- self.writer.write_all(&[Tag::Integer as u8])?;
- let mut buf = Vec::new();
-
- input.read_all(Error, |read| {
- while let Ok(byte) = read.read_byte() {
- buf.push(byte);
- }
-
- Ok(())
- })?;
-
- self.write_len(buf.len())?;
-
- Ok(self.writer.write_all(&mut buf)?)
- }
-
- pub fn write_sequence<F: FnOnce(&mut Der<Vec<u8>>) -> Result<(), Error>>
- (&mut self,
- func: F)
- -> Result<(), Error> {
- self.writer.write_all(&[Tag::Sequence as u8])?;
- let mut buf = Vec::new();
-
- {
- let mut inner = Der::new(&mut buf);
- func(&mut inner)?;
- }
-
- self.write_len(buf.len())?;
- Ok(self.writer.write_all(&buf)?)
- }
-}
-
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use untrusted;
-
- fn with_good_i<F, R>(value: &[u8], f: F)
- where F: FnOnce(&mut untrusted::Reader) -> Result<R, ring::error::Unspecified>
- {
- let r = untrusted::Input::from(value).read_all(ring::error::Unspecified, f);
- assert!(r.is_ok());
- }
-
- fn with_bad_i<F, R>(value: &[u8], f: F)
- where F: FnOnce(&mut untrusted::Reader) -> Result<R, ring::error::Unspecified>
- {
- let r = untrusted::Input::from(value).read_all(ring::error::Unspecified, f);
- assert!(r.is_err());
- }
-
- static ZERO_INTEGER: &'static [u8] = &[0x02, 0x01, 0x00];
-
- static GOOD_POSITIVE_INTEGERS: &'static [(&'static [u8], u8)] =
- &[(&[0x02, 0x01, 0x01], 0x01),
- (&[0x02, 0x01, 0x02], 0x02),
- (&[0x02, 0x01, 0x7e], 0x7e),
- (&[0x02, 0x01, 0x7f], 0x7f),
-
- // Values that need to have an 0x00 prefix to disambiguate them from
- // them from negative values.
- (&[0x02, 0x02, 0x00, 0x80], 0x80),
- (&[0x02, 0x02, 0x00, 0x81], 0x81),
- (&[0x02, 0x02, 0x00, 0xfe], 0xfe),
- (&[0x02, 0x02, 0x00, 0xff], 0xff)];
-
- static BAD_NONNEGATIVE_INTEGERS: &'static [&'static [u8]] = &[&[], // At end of input
- &[0x02], // Tag only
- &[0x02, 0x00], // Empty value
-
- // Length mismatch
- &[0x02, 0x00, 0x01],
- &[0x02, 0x01],
- &[0x02, 0x01, 0x00, 0x01],
- &[0x02, 0x01, 0x01, 0x00], // Would be valid if last byte is ignored.
- &[0x02, 0x02, 0x01],
-
- // Negative values
- &[0x02, 0x01, 0x80],
- &[0x02, 0x01, 0xfe],
- &[0x02, 0x01, 0xff],
-
- // Values that have an unnecessary leading 0x00
- &[0x02, 0x02, 0x00, 0x00],
- &[0x02, 0x02, 0x00, 0x01],
- &[0x02, 0x02, 0x00, 0x02],
- &[0x02, 0x02, 0x00, 0x7e],
- &[0x02, 0x02, 0x00, 0x7f]];
-
- #[test]
- fn test_positive_integer() {
- with_bad_i(ZERO_INTEGER, |input| {
- let _ = positive_integer(input)?;
- Ok(())
- });
- for &(ref test_in, test_out) in GOOD_POSITIVE_INTEGERS.iter() {
- with_good_i(test_in, |input| {
- let test_out = [test_out];
- assert_eq!(positive_integer(input)?,
- untrusted::Input::from(&test_out[..]));
- Ok(())
- });
- }
- for &test_in in BAD_NONNEGATIVE_INTEGERS.iter() {
- with_bad_i(test_in, |input| {
- let _ = positive_integer(input)?;
- Ok(())
- });
- }
- }
-}
diff --git a/src/rsa/mod.rs b/src/rsa/mod.rs
deleted file mode 100644
index 3a46bdb..0000000
--- a/src/rsa/mod.rs
+++ /dev/null
@@ -1,165 +0,0 @@
-//! Helper module for RSA key encoding / decoding.
-
-mod der;
-
-use untrusted::Input;
-
-use self::der::{Tag, Der};
-
-/// Corresponds to `1.2.840.113549.1.1.1 rsaEncryption(PKCS #1)`
-const RSA_PKCS1_OID: &'static [u8] = &[0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01];
-
-pub fn convert_to_pkcs1<'a>(input: &[u8]) -> Vec<u8> {
- // if we ever move away from `ring`, this needs to do an explicit key size check (>= 2048)
- from_pkcs1(input)
- .or_else(|| from_spki(input))
- .unwrap_or_else(|| input.to_vec())
-}
-
-fn from_pkcs1(input: &[u8]) -> Option<Vec<u8>> {
- let _input = Input::from(&input);
- _input.read_all(der::Error, |i| {
- der::nested(i, Tag::Sequence, der::Error, |i| {
- let _ = der::positive_integer(i)?;
- let _ = der::positive_integer(i)?;
- // if the input was already pkcs1, just return it
- Ok(input.to_vec())
- })
- })
- .ok()
-}
-
-fn from_spki(input: &[u8]) -> Option<Vec<u8>> {
- let _input = Input::from(&input);
- _input.read_all(der::Error, |i| {
- der::nested(i, Tag::Sequence, der::Error, |i| {
- der::nested(i, Tag::Sequence, der::Error, |i| {
- let oid = der::expect_tag_and_get_value(i, Tag::OID)?;
- if oid != Input::from(RSA_PKCS1_OID) {
- return Err(der::Error);
- }
-
- let _ = der::expect_tag_and_get_value(i, Tag::Null)?;
- Ok(())
- })?;
-
- der::nested(i, Tag::BitString, der::Error, |i| {
- // wtf why
- let _ = der::expect_tag_and_get_value(i, Tag::EOC)?;
- Ok(i.skip_to_end().iter().cloned().collect())
- //der::nested(i, Tag::Sequence, der::Error, |i| {
- // let n = der::positive_integer(i)?;
- // let e = der::positive_integer(i)?;
- // write_pkcs1(n, e)
- //})
- })
- })
- })
- .ok()
-}
-
-#[allow(dead_code)]
-fn write_pkcs1(n: Input, e: Input) -> Result<Vec<u8>, der::Error> {
- let mut output = Vec::new();
- {
- let mut _der = Der::new(&mut output);
- _der.write_sequence(|_der| {
- _der.write_integer(n)?;
- _der.write_integer(e)
- })?;
- }
-
- Ok(output)
-}
-
-
-#[cfg(test)]
-mod test {
- use super::*;
- use pem;
- use std::fs::File;
- use std::io::Read;
-
- fn read_file(path: &str) -> Vec<u8> {
- let mut file = File::open(path).expect("couldn't open file");
- let mut buf = Vec::new();
- file.read_to_end(&mut buf).expect("couldn't read file");
- buf
- }
-
- #[test]
- fn test_write_pkcs1() {
- let contents = read_file("./tests/rsa/pkcs1-1.pub");
- let contents = pem::parse(&contents).expect("not PEM").contents;
-
- let n = &[0x00, 0x9d, 0xda, 0x85, 0x17, 0x15, 0x67, 0xab, 0xb1, 0x63, 0x8a, 0x13, 0x01,
- 0xee, 0xc0, 0x63, 0x7c, 0xc3, 0x08, 0x4b, 0x6d, 0x75, 0xd8, 0x70, 0x74, 0x3d,
- 0xab, 0x98, 0xef, 0x00, 0xd0, 0xf2, 0x04, 0xe7, 0x7d, 0xb5, 0xa4, 0x08, 0xe3,
- 0x90, 0xda, 0x4b, 0xe1, 0xd1, 0xff, 0x0f, 0x87, 0x8d, 0x6b, 0x43, 0x58, 0x99,
- 0x88, 0xf6, 0x99, 0xab, 0xe7, 0x90, 0xfb, 0x2a, 0xa1, 0x3c, 0x2b, 0x0f, 0x24,
- 0xa4, 0x9e, 0xab, 0xd1, 0xfc, 0xa0, 0xe0, 0xa8, 0x9f, 0x82, 0x48, 0xe5, 0xa7,
- 0xd2, 0x4d, 0x44, 0xe4, 0x0b, 0x43, 0x66, 0x03, 0x54, 0x8d, 0xdd, 0xc3, 0x0c,
- 0x26, 0xf5, 0x20, 0x36, 0xbf, 0xae, 0x05, 0x63, 0x9c, 0xf8, 0x81, 0xeb, 0xf7,
- 0x4a, 0x3a, 0xc4, 0x14, 0xee, 0xce, 0x99, 0x66, 0x9f, 0x3c, 0xe3, 0x18, 0x21,
- 0x8d, 0x68, 0xe3, 0x0b, 0xb7, 0xb3, 0xf7, 0xca, 0xe1, 0x7c, 0xab, 0xd5, 0x17,
- 0x6f, 0x50, 0xc1, 0x38, 0x1b, 0xea, 0x62, 0xeb, 0x46, 0x07, 0x95, 0x01, 0xfb,
- 0xd3, 0x1a, 0xd0, 0xae, 0x1c, 0xe6, 0x53, 0x27, 0x53, 0x2d, 0x08, 0x55, 0xbe,
- 0xa3, 0xd6, 0xd1, 0x02, 0x14, 0xa4, 0xa2, 0xe1, 0x14, 0xde, 0xa4, 0x0e, 0x54,
- 0x00, 0xe5, 0x79, 0x2c, 0x4d, 0x93, 0xe8, 0x6b, 0x3c, 0xf6, 0x44, 0x63, 0x85,
- 0x3c, 0x6f, 0x56, 0xc2, 0x80, 0x02, 0x3f, 0x76, 0xcf, 0x75, 0x46, 0x5f, 0x9a,
- 0x49, 0x47, 0xdc, 0xe6, 0xe9, 0x9a, 0xc0, 0x6e, 0x34, 0x9e, 0x9f, 0xd2, 0xdf,
- 0xbc, 0x55, 0xa0, 0x77, 0x61, 0xf3, 0xd5, 0x0c, 0xb8, 0x77, 0xd2, 0x66, 0xd2,
- 0x24, 0x9a, 0x25, 0xbe, 0x55, 0x1b, 0x4e, 0xbf, 0x3b, 0x82, 0x4c, 0x4f, 0x51,
- 0x57, 0x7c, 0x8b, 0xf6, 0x38, 0xfe, 0x4d, 0x97, 0x32, 0xa8, 0xc8, 0x3c, 0x69,
- 0xe5, 0x91, 0x15, 0x2c, 0x75, 0x8d, 0x92, 0xc1, 0xc7, 0x6b];
-
- let e = &[0x01, 0x00, 0x01];
- let bytes = write_pkcs1(Input::from(n), Input::from(e));
-
- assert_eq!(bytes, Ok(contents));
- }
-
- #[test]
- fn pkcs1_noop_conversion_1() {
- let contents = read_file("./tests/rsa/pkcs1-1.pub");
- let contents = pem::parse(&contents).expect("not PEM").contents;
- assert_eq!(convert_to_pkcs1(&contents), contents);
- }
-
- #[test]
- fn pkcs1_noop_conversion_2() {
- let contents = read_file("./tests/rsa/pkcs1-2.pub");
- let contents = pem::parse(&contents).expect("not PEM").contents;
- assert_eq!(convert_to_pkcs1(&contents), contents);
- }
-
- #[test]
- fn pkcs1_from_spki_conversion_1() {
- let spki = read_file("./tests/rsa/spki-1.pub");
- let spki = pem::parse(&spki).expect("not PEM").contents;
-
- let pkcs1 = read_file("./tests/rsa/pkcs1-1.pub");
- let pkcs1 = pem::parse(&pkcs1).expect("not PEM").contents;
-
- for (i, (a, b)) in convert_to_pkcs1(&spki).iter().zip(pkcs1.iter()).enumerate() {
- println!("{} {} {}", i, a, b);
- }
-
- assert!(convert_to_pkcs1(&spki) == pkcs1);
- }
-
- #[test]
- fn pkcs1_from_spki_conversion_2() {
- let spki = read_file("./tests/rsa/spki-2.pub");
- let spki = pem::parse(&spki).expect("not PEM").contents;
-
- let pkcs1 = read_file("./tests/rsa/pkcs1-2.pub");
- let pkcs1 = pem::parse(&pkcs1).expect("not PEM").contents;
-
- for (i, (a, b)) in convert_to_pkcs1(&spki).iter().zip(pkcs1.iter()).enumerate() {
- println!("{} {} {}", i, a, b);
- }
-
- assert!(convert_to_pkcs1(&spki) == pkcs1);
- }
-}
diff --git a/src/shims.rs b/src/shims.rs
new file mode 100644
index 0000000..850d7eb
--- /dev/null
+++ b/src/shims.rs
@@ -0,0 +1,249 @@
+use chrono::DateTime;
+use chrono::offset::Utc;
+use data_encoding::BASE64URL;
+use std::collections::{HashMap, HashSet};
+
+use Result;
+use crypto;
+use error::Error;
+use metadata;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct RootMetadata {
+ #[serde(rename = "type")]
+ typ: metadata::Role,
+ version: u32,
+ consistent_snapshot: bool,
+ expires: DateTime<Utc>,
+ keys: HashMap<crypto::KeyId, crypto::PublicKey>,
+ roles: HashMap<metadata::Role, metadata::RoleDefinition>,
+}
+
+impl RootMetadata {
+ pub fn from(meta: &metadata::RootMetadata) -> Result<Self> {
+ let mut roles = HashMap::new();
+ let _ = roles.insert(metadata::Role::Root, meta.root().clone());
+ let _ = roles.insert(metadata::Role::Snapshot, meta.snapshot().clone());
+ let _ = roles.insert(metadata::Role::Targets, meta.targets().clone());
+ let _ = roles.insert(metadata::Role::Timestamp, meta.timestamp().clone());
+
+ Ok(RootMetadata {
+ typ: metadata::Role::Root,
+ version: meta.version(),
+ expires: meta.expires().clone(),
+ consistent_snapshot: meta.consistent_snapshot(),
+ keys: meta.keys().clone(),
+ roles: roles,
+ })
+ }
+
+ pub fn try_into(mut self) -> Result<metadata::RootMetadata> {
+ if self.typ != metadata::Role::Root {
+ return Err(Error::Encoding(format!(
+ "Attempted to decode root metdata labeled as {:?}",
+ self.typ
+ )));
+ }
+
+ let mut keys = Vec::new();
+ for (key_id, value) in self.keys.drain() {
+ if &key_id != value.key_id() {
+ warn!(
+ "Received key with ID {:?} but calculated it's value as {:?}. \
+ Refusing to add it to the set of trusted keys.",
+ key_id,
+ value.key_id()
+ );
+ } else {
+ debug!(
+ "Found key with good ID {:?}. Adding it to the set of trusted keys.",
+ key_id
+ );
+ keys.push(value);
+ }
+ }
+
+ let root = self.roles.remove(&metadata::Role::Root).ok_or_else(|| {
+ Error::Encoding("Missing root role definition".into())
+ })?;
+ let snapshot = self.roles.remove(&metadata::Role::Snapshot).ok_or_else(
+ || {
+ Error::Encoding("Missing snapshot role definition".into())
+ },
+ )?;
+ let targets = self.roles.remove(&metadata::Role::Targets).ok_or_else(|| {
+ Error::Encoding("Missing targets role definition".into())
+ })?;
+ let timestamp = self.roles.remove(&metadata::Role::Timestamp).ok_or_else(
+ || {
+ Error::Encoding("Missing timestamp role definition".into())
+ },
+ )?;
+
+ metadata::RootMetadata::new(
+ self.version,
+ self.expires,
+ self.consistent_snapshot,
+ keys,
+ root,
+ snapshot,
+ targets,
+ timestamp,
+ )
+ }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct RoleDefinition {
+ threshold: u32,
+ key_ids: Vec<crypto::KeyId>,
+}
+
+impl RoleDefinition {
+ pub fn from(role: &metadata::RoleDefinition) -> Result<Self> {
+ let mut key_ids = role.key_ids()
+ .iter()
+ .cloned()
+ .collect::<Vec<crypto::KeyId>>();
+ key_ids.sort();
+
+ Ok(RoleDefinition {
+ threshold: role.threshold(),
+ key_ids: key_ids,
+ })
+ }
+
+ pub fn try_into(mut self) -> Result<metadata::RoleDefinition> {
+ let vec_len = self.key_ids.len();
+ if vec_len < 1 {
+ return Err(Error::Encoding(
+ "Role defined with no assoiciated key IDs.".into(),
+ ));
+ }
+
+ let key_ids = self.key_ids.drain(0..).collect::<HashSet<crypto::KeyId>>();
+ let dupes = vec_len - key_ids.len();
+
+ if dupes != 0 {
+ return Err(Error::Encoding(
+ format!("Found {} duplicate key IDs.", dupes),
+ ));
+ }
+
+ Ok(metadata::RoleDefinition::new(self.threshold, key_ids)?)
+ }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct TimestampMetadata {
+ #[serde(rename = "type")]
+ typ: metadata::Role,
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<metadata::MetadataPath, metadata::MetadataDescription>,
+}
+
+impl TimestampMetadata {
+ pub fn from(metadata: &metadata::TimestampMetadata) -> Result<Self> {
+ Ok(TimestampMetadata {
+ typ: metadata::Role::Timestamp,
+ version: metadata.version(),
+ expires: metadata.expires().clone(),
+ meta: metadata.meta().clone(),
+ })
+ }
+
+ pub fn try_into(self) -> Result<metadata::TimestampMetadata> {
+ if self.typ != metadata::Role::Timestamp {
+ return Err(Error::Encoding(format!(
+ "Attempted to decode timestamp metdata labeled as {:?}",
+ self.typ
+ )));
+ }
+
+ metadata::TimestampMetadata::new(self.version, self.expires, self.meta)
+ }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct SnapshotMetadata {
+ #[serde(rename = "type")]
+ typ: metadata::Role,
+ version: u32,
+ expires: DateTime<Utc>,
+ meta: HashMap<metadata::MetadataPath, metadata::MetadataDescription>,
+}
+
+impl SnapshotMetadata {
+ pub fn from(metadata: &metadata::SnapshotMetadata) -> Result<Self> {
+ Ok(SnapshotMetadata {
+ typ: metadata::Role::Snapshot,
+ version: metadata.version(),
+ expires: metadata.expires().clone(),
+ meta: metadata.meta().clone(),
+ })
+ }
+
+ pub fn try_into(self) -> Result<metadata::SnapshotMetadata> {
+ if self.typ != metadata::Role::Snapshot {
+ return Err(Error::Encoding(format!(
+ "Attempted to decode snapshot metdata labeled as {:?}",
+ self.typ
+ )));
+ }
+
+ metadata::SnapshotMetadata::new(self.version, self.expires, self.meta)
+ }
+}
+
+
+#[derive(Serialize, Deserialize)]
+pub struct TargetsMetadata {
+ #[serde(rename = "type")]
+ typ: metadata::Role,
+ version: u32,
+ expires: DateTime<Utc>,
+ targets: HashMap<metadata::TargetPath, metadata::TargetDescription>,
+}
+
+impl TargetsMetadata {
+ pub fn from(metadata: &metadata::TargetsMetadata) -> Result<Self> {
+ Ok(TargetsMetadata {
+ typ: metadata::Role::Targets,
+ version: metadata.version(),
+ expires: metadata.expires().clone(),
+ targets: metadata.targets().clone(),
+ })
+ }
+
+ pub fn try_into(self) -> Result<metadata::TargetsMetadata> {
+ if self.typ != metadata::Role::Targets {
+ return Err(Error::Encoding(format!(
+ "Attempted to decode targets metdata labeled as {:?}",
+ self.typ
+ )));
+ }
+
+ metadata::TargetsMetadata::new(self.version, self.expires, self.targets)
+ }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct PublicKey {
+ #[serde(rename = "type")]
+ typ: crypto::KeyType,
+ public_key: String,
+}
+
+impl PublicKey {
+ pub fn new(typ: crypto::KeyType, public_key_bytes: &[u8]) -> Self {
+ PublicKey {
+ typ: typ,
+ public_key: BASE64URL.encode(public_key_bytes),
+ }
+ }
+
+ pub fn public_key(&self) -> &String {
+ &self.public_key
+ }
+}
diff --git a/src/tuf.rs b/src/tuf.rs
index 8b666e4..238f004 100644
--- a/src/tuf.rs
+++ b/src/tuf.rs
@@ -1,1198 +1,338 @@
-use chrono::UTC;
-use json;
-use hyper::Url as HyperUrl;
-use hyper::client::Client;
-use ring::digest;
-use ring::digest::{SHA256, SHA512};
-use std::collections::{HashMap, HashSet};
-use std::fs::{self, File, DirBuilder};
-use std::io::{Read, Write, Seek, SeekFrom};
-use std::path::{PathBuf, Path};
-use url::Url;
-use uuid::Uuid;
+//! Components needed to verify TUF metadata and targets.
-use cjson;
+use chrono::offset::Utc;
+use std::collections::HashSet;
+use std::marker::PhantomData;
+
+use Result;
+use crypto::KeyId;
use error::Error;
-use http;
-use metadata::{Role, RoleType, Root, Targets, Timestamp, Snapshot, Metadata, SignedMetadata,
- RootMetadata, TargetsMetadata, TimestampMetadata, SnapshotMetadata, HashType,
- HashValue, KeyId, Key};
-use util::{self, TempFile};
+use interchange::DataInterchange;
+use metadata::{SignedMetadata, RootMetadata, TimestampMetadata, Role, SnapshotMetadata,
+ MetadataPath, TargetsMetadata, TargetPath, TargetDescription};
-/// A remote TUF repository.
+/// Contains trusted TUF metadata and can be used to verify other metadata and targets.
#[derive(Debug)]
-pub enum RemoteRepo {
- /// An untrusted repository on the same file sytem. Primarily used for testing.
- File(PathBuf),
- /// A repository reachable via HTTP/S.
- Http(Url),
-}
-
-impl RemoteRepo {
- fn as_fetch(&self) -> FetchType {
- match self {
- &RemoteRepo::File(ref path) => FetchType::File(path.clone()),
- &RemoteRepo::Http(ref url) => FetchType::Http(url.clone()),
- }
- }
-}
-
-
-/// Interface for interacting with TUF repositories.
-#[derive(Debug)]
-pub struct Tuf {
- remote: RemoteRepo,
- local_path: PathBuf,
- http_client: Client,
+pub struct Tuf<D: DataInterchange> {
root: RootMetadata,
+ snapshot: Option<SnapshotMetadata>,
targets: Option<TargetsMetadata>,
timestamp: Option<TimestampMetadata>,
- snapshot: Option<SnapshotMetadata>,
+ _interchange: PhantomData<D>,
}
-impl Tuf {
- /// Create a `Tuf` struct from an existing repo with the initial root keys pinned.
- pub fn from_root_keys(root_keys: Vec<Key>, config: Config) -> Result<Self, Error> {
- if config.init {
- Self::initialize(&config.local_path)?;
- }
+impl<D: DataInterchange> Tuf<D> {
+ /// Create a new `Tuf` struct from a known set of pinned root keys that are used to verify the
+ /// signed metadata.
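+ ///
+ /// An illustrative (not compiled) sketch; `signed_root` and `key_ids` are assumed
+ /// to have been obtained elsewhere:
+ ///
+ /// ```ignore
+ /// let tuf = Tuf::<JsonDataInterchange>::from_root_pinned(signed_root, &key_ids)?;
+ /// ```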
+ pub fn from_root_pinned(
+ mut signed_root: SignedMetadata<D, RootMetadata>,
+ root_key_ids: &[KeyId],
+ ) -> Result<Self> {
+ signed_root.signatures_mut().retain(|s| {
+ root_key_ids.contains(s.key_id())
+ });
+ Self::from_root(signed_root)
+ }
- let root = {
- let fetch_type = &FetchType::Cache(config.local_path.clone());
-
- match Self::read_root_with_keys(fetch_type, &config.http_client, &root_keys) {
- Ok(modified_root) => {
- Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &config.http_client,
- &Role::Root,
- Some(1),
- true,
- modified_root.root.threshold,
- &modified_root.root.key_ids,
- &modified_root.keys,
- None,
- None,
- &mut None)?
- }
- Err(e) => {
- debug!("Failed to read root locally: {:?}", e);
- let fetch_type = &config.remote.as_fetch();
- let modified_root =
- Self::read_root_with_keys(fetch_type, &config.http_client, &root_keys)?;
- Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &config.http_client,
- &Role::Root,
- Some(1),
- true,
- modified_root.root.threshold,
- &modified_root.root.key_ids,
- &modified_root.keys,
- None,
- None,
- &mut None)?
- }
- }
- };
-
- let mut tuf = Tuf {
- remote: config.remote,
- local_path: config.local_path,
- http_client: config.http_client,
+    /// Create a new `Tuf` struct from a piece of metadata that is assumed to be trusted.
+ ///
+ /// **WARNING**: This is trust-on-first-use (TOFU) and offers weaker security guarantees than
+ /// the related method `from_root_pinned`.
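+    ///
+    /// A sketch of the TOFU flow (`signed_root` is assumed to have been fetched from an
+    /// as-yet untrusted repository):
+    ///
+    /// ```ignore
+    /// // No key pinning: the root is trusted if it is merely self-consistent.
+    /// let tuf = Tuf::<JsonDataInterchange>::from_root(signed_root)?;
+    /// ```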
+ pub fn from_root(signed_root: SignedMetadata<D, RootMetadata>) -> Result<Self> {
+ let root = D::deserialize::<RootMetadata>(signed_root.signed())?;
+ let _ = signed_root.verify(
+ root.root().threshold(),
+ root.root().key_ids(),
+ root.keys(),
+ )?;
+ Ok(Tuf {
root: root,
+ snapshot: None,
targets: None,
timestamp: None,
- snapshot: None,
- };
-
- tuf.update()?;
- Ok(tuf)
- }
-
- /// Create a `Tuf` struct from a new repo. Must contain the `root.json`. The root is trusted
- /// with only verification on consistency, not authenticity.
- pub fn new(config: Config) -> Result<Self, Error> {
- if config.init {
- Self::initialize(&config.local_path)?;
- }
-
- let root = {
- let fetch_type = &FetchType::Cache(config.local_path.clone());
- let (_, root) =
- Self::unverified_read_root(fetch_type, &config.http_client, None)?;
-
- Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &config.http_client,
- &Role::Root,
- None,
- true,
- root.root.threshold,
- &root.root.key_ids,
- &root.keys,
- None,
- None,
- &mut None)?
- };
-
- let mut tuf = Tuf {
- remote: config.remote,
- local_path: config.local_path,
- http_client: config.http_client,
- root: root,
- targets: None,
- timestamp: None,
- snapshot: None,
- };
- tuf.update()?;
-
- Ok(tuf)
- }
-
- /// Create and verify the necessary directory structure for a TUF repo.
- pub fn initialize(local_path: &PathBuf) -> Result<(), Error> {
- info!("Initializing local storage: {}",
- local_path.to_string_lossy());
-
- for dir in vec![PathBuf::from("metadata").join("current"),
- PathBuf::from("metadata").join("archive"),
- PathBuf::from("targets"),
- PathBuf::from("temp")]
- .iter() {
- let path = local_path.as_path().join(dir);
- debug!("Creating path: {}", path.to_string_lossy());
- DirBuilder::new().recursive(true).create(path)?
- // TODO error if path is not fully owned by the current user
- }
-
- Ok(())
- }
-
- // TODO clean function that cleans up local_path for old targets, old dirs, etc
-
- fn temp_file(&self) -> Result<TempFile, Error> {
- Ok(TempFile::new(self.local_path.join("temp"))?)
- }
-
- /// Update the metadata from local and remote sources.
- pub fn update(&mut self) -> Result<(), Error> {
-        info!("Updating metadata");
- match self.update_local() {
- Ok(()) => (),
- Err(e) => warn!("Error updating metadata from local sources: {:?}", e),
- };
- self.update_remote()?;
- info!("Successfully updated metadata");
- Ok(())
- }
-
- fn update_remote(&mut self) -> Result<(), Error> {
- debug!("Updating metadata from remote sources");
- let fetch_type = &self.remote.as_fetch();
- self.update_root(fetch_type)?;
-
- if self.update_timestamp(fetch_type)? && self.update_snapshot(fetch_type)? {
- self.update_targets(fetch_type)
- } else {
- Ok(())
- }
- }
-
- fn update_local(&mut self) -> Result<(), Error> {
- debug!("Updating metadata from local sources");
- let fetch_type = &FetchType::Cache(self.local_path.clone());
-
- self.update_root(fetch_type)?;
-
- if self.update_timestamp(fetch_type)? && self.update_snapshot(fetch_type)? {
- self.update_targets(fetch_type)
- } else {
- Ok(())
- }
- }
-
- fn update_root(&mut self, fetch_type: &FetchType) -> Result<(), Error> {
- debug!("Updating root metadata");
-
- let (_, temp_root) =
- Self::unverified_read_root(fetch_type, &self.http_client, Some(self.local_path.as_path()))?;
-
-        // handle the edge case where we never enter the update loop
- // AND the first piece of metadata is expired
- if temp_root.version == 1 && self.root.expires() <= &UTC::now() {
- return Err(Error::ExpiredMetadata(Role::Root));
- }
-
- // TODO reuse temp root as last one
- for i in (self.root.version + 1)..(temp_root.version + 1) {
- let mut temp_file = if !fetch_type.is_cache() {
- Some(self.temp_file()?)
- } else {
- None
- };
-
- let root = Self::get_metadata::<Root, RootMetadata, TempFile>(fetch_type,
- &self.http_client,
- &Role::Root,
- Some(i),
- true,
- self.root.root.threshold,
- &self.root.root.key_ids,
- &self.root.keys,
- None,
- None,
- &mut temp_file)?;
-
- // verify root again against itself (for cross signing)
- // TODO this is not the most efficient way to do it, but it works
- let root_again =
- Self::get_metadata::<Root, RootMetadata, File>(fetch_type,
- &self.http_client,
- &Role::Root,
- Some(i),
- false,
- root.root.threshold,
- &root.root.key_ids,
- &root.keys,
- None,
- None,
- &mut None::<File>)?;
- if root != root_again {
- // TODO better error message
-                return Err(Error::Generic(format!("Cross signing of root version {} failed", i)));
- }
-
- info!("Rotated to root metadata version {}", i);
- self.root = root;
-
- match temp_file {
- Some(temp_file) => {
- temp_file.persist(&self.local_path
- .join("metadata")
- .join("archive")
- .join(format!("{}.root.json", i)))?
- }
- None => (),
- };
-
- // set to None to untrust old metadata
- // TODO delete old metadata
- // TODO check that these resets are in line with the Mercury paper
- self.targets = None;
- self.timestamp = None;
- self.snapshot = None;
- }
-
- Ok(())
- }
-
- fn update_timestamp(&mut self, fetch_type: &FetchType) -> Result<bool, Error> {
- debug!("Updating timestamp metadata");
-
- let mut temp_file = if !fetch_type.is_cache() {
- Some(self.temp_file()?)
- } else {
- None
- };
-
- let timestamp =
- Self::get_metadata::<Timestamp, TimestampMetadata, TempFile>(fetch_type,
- &self.http_client,
- &Role::Timestamp,
- None,
- false,
- self.root.timestamp.threshold,
- &self.root.timestamp.key_ids,
- &self.root.keys,
- None,
- None,
- &mut temp_file)?;
-
- match self.timestamp {
- Some(ref t) if t.version > timestamp.version => {
- return Err(Error::VersionDecrease(Role::Timestamp));
- }
- Some(ref t) if t.version == timestamp.version => return Ok(false),
- _ => self.timestamp = Some(timestamp),
- }
-
- if let Some(ref timestamp) = self.timestamp {
- if let Some(ref timestamp_meta) = timestamp.meta.get("snapshot.json") {
- if timestamp_meta.version > timestamp.version {
- info!("Timestamp metadata is up to date");
- return Ok(false);
- }
- }
- }
-
- match temp_file {
- Some(temp_file) => {
- let current_path = self.local_path
- .join("metadata")
- .join("current")
- .join("timestamp.json");
-
- if current_path.exists() {
- fs::rename(current_path.clone(),
- self.local_path
- .join("metadata")
- .join("archive")
- .join("timestamp.json"))?;
- };
-
-                temp_file.persist(&current_path)?
- }
- None => (),
- };
-
- Ok(true)
- }
-
- fn update_snapshot(&mut self, fetch_type: &FetchType) -> Result<bool, Error> {
- debug!("Updating snapshot metadata");
-
- let meta = match self.timestamp {
- Some(ref timestamp) => {
- match timestamp.meta.get("snapshot.json") {
- Some(meta) => meta,
- None => {
- return Err(Error::VerificationFailure("Missing snapshot.json in \
- timestamp.json"
- .to_string()))
- }
- }
- }
- None => return Err(Error::MissingMetadata(Role::Timestamp)),
- };
-
- let (hash_alg, expected_hash): (&HashType, &HashValue) = HashType::preferences().iter()
- .fold(None, |res, pref| {
- res.or_else(|| if let Some(hash) = meta.hashes.get(&pref) {
- Some((pref, hash))
- } else {
- None
- })
- })
- .ok_or_else(|| Error::NoSupportedHashAlgorithms)?;
-
- let mut temp_file = if !fetch_type.is_cache() {
- Some(self.temp_file()?)
- } else {
- None
- };
-
- let snapshot = Self::get_metadata::<Snapshot,
- SnapshotMetadata,
- TempFile>(fetch_type,
- &self.http_client,
- &Role::Snapshot,
- None,
- false,
- self.root.snapshot.threshold,
- &self.root.snapshot.key_ids,
- &self.root.keys,
- Some(meta.length),
- Some((&hash_alg, &expected_hash.0)),
- &mut temp_file)?;
- // TODO ? check downloaded version matches what was in the timestamp.json
-
- match self.snapshot {
- Some(ref s) if s.version > snapshot.version => {
- return Err(Error::VersionDecrease(Role::Snapshot));
- }
- Some(ref s) if s.version == snapshot.version => return Ok(false),
- _ => self.snapshot = Some(snapshot),
- }
-
- // TODO this needs to be extended once we do delegations
- if let Some(ref snapshot) = self.snapshot {
- if let Some(ref snapshot_meta) = snapshot.meta.get("targets.json") {
- if let Some(ref targets) = self.targets {
- if snapshot_meta.version > targets.version {
- info!("Snapshot metadata is up to date");
- return Ok(false);
- }
- }
- }
- }
-
- match temp_file {
- Some(temp_file) => {
- let current_path = self.local_path
- .join("metadata")
- .join("current")
- .join("snapshot.json");
-
- if current_path.exists() {
- fs::rename(current_path.clone(),
- self.local_path
- .join("metadata")
- .join("archive")
- .join("snapshot.json"))?;
- };
-
-                temp_file.persist(&current_path)?
- }
- None => (),
- };
-
- Ok(true)
- }
-
- fn update_targets(&mut self, fetch_type: &FetchType) -> Result<(), Error> {
- debug!("Updating targets metadata");
-
- let meta = match self.snapshot {
- Some(ref snapshot) => {
- match snapshot.meta.get("targets.json") {
- Some(meta) => meta,
- None => {
- return Err(Error::VerificationFailure("Missing targets.json in \
- snapshot.json"
- .to_string()))
- }
- }
- }
- None => return Err(Error::MissingMetadata(Role::Snapshot)),
- };
-
- let hash_data = match meta.hashes {
- Some(ref hashes) => {
- Some(HashType::preferences().iter()
- .fold(None, |res, pref| {
- res.or_else(|| if let Some(hash) = hashes.get(&pref) {
- Some((pref, hash))
- } else {
- None
- })
- })
- .ok_or_else(|| Error::NoSupportedHashAlgorithms)?)
- }
- None => None,
- };
-
- let hash_data = hash_data.map(|(t, v)| (t, v.0.as_slice()));
-
- let mut temp_file = if !fetch_type.is_cache() {
- Some(self.temp_file()?)
- } else {
- None
- };
-
- let targets = Self::get_metadata::<Targets, TargetsMetadata, TempFile>(fetch_type,
- &self.http_client,
- &Role::Targets,
- None,
- false,
- self.root.targets.threshold,
- &self.root.targets.key_ids,
- &self.root.keys,
- meta.length,
- hash_data,
- &mut temp_file)?;
-
- // TODO ? check downloaded version matches what was in the snapshot.json
-
- match self.targets {
- Some(ref t) if t.version > targets.version => {
- return Err(Error::VersionDecrease(Role::Targets));
- }
- Some(ref t) if t.version == targets.version => return Ok(()),
- _ => self.targets = Some(targets),
- }
-
- match temp_file {
- Some(temp_file) => {
- let current_path = self.local_path
- .join("metadata")
- .join("current")
- .join("targets.json");
-
- if current_path.exists() {
- fs::rename(current_path.clone(),
- self.local_path
- .join("metadata")
- .join("archive")
- .join("targets.json"))?;
- };
-
-                temp_file.persist(&current_path)?
- }
- None => (),
- };
-
- Ok(())
- }
-
- fn get_metadata<R: RoleType, M: Metadata<R>, W: Write>(fetch_type: &FetchType,
- http_client: &Client,
- role: &Role,
- metadata_version: Option<i32>,
- allow_expired: bool,
- threshold: i32,
- trusted_ids: &[KeyId],
- available_keys: &HashMap<KeyId, Key>,
- size: Option<i64>,
- hash_data: Option<(&HashType,
- &[u8])>,
- mut out: &mut Option<W>)
- -> Result<M, Error> {
-
- debug!("Loading metadata from {:?}", fetch_type);
- let metadata_version_str = metadata_version.map(|x| format!("{}.", x))
- .unwrap_or_else(|| "".to_string());
-
- let buf: Vec<u8> = match fetch_type {
- &FetchType::Cache(ref local_path) => {
- let path = local_path.join("metadata")
- .join("current")
- .join(format!("{}{}.json", metadata_version_str, role));
- info!("Reading metadata from local path: {:?}", path);
-
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
-
- match (size, hash_data) {
- (None, None) => file.read_to_end(&mut buf).map(|_| ())?,
- _ => Self::read_and_verify(&mut file, &mut Some(&mut buf), size, hash_data)?,
- };
-
- buf
- }
- &FetchType::File(ref path) => {
- let path = path.join(format!("{}{}.json", metadata_version_str, role));
- info!("Reading metadata from path: {:?}", path);
-
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
-
- match (size, hash_data) {
- (None, None) => file.read_to_end(&mut buf).map(|_| ())?,
- _ => Self::read_and_verify(&mut file, &mut Some(&mut buf), size, hash_data)?,
- };
-
- buf
- }
- &FetchType::Http(ref url) => {
- let mut url = url.clone();
- {
- url.path_segments_mut()
- .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
- .push(&format!("{}{}.json", metadata_version_str, role));
- }
- let mut resp = http::get(http_client, &url)?;
- let mut buf = Vec::new();
-
- match (size, hash_data) {
- (None, None) => resp.read_to_end(&mut buf).map(|_| ())?,
- _ => Self::read_and_verify(&mut resp, &mut Some(&mut buf), size, hash_data)?,
- };
-
- buf
- }
- };
-
- let signed: SignedMetadata<R> = json::from_slice(&buf)?;
- // TODO clone
- Self::verify_meta::<R>(signed.clone(), role, threshold, trusted_ids, available_keys)?;
- let meta: M = json::from_value(signed.signed)?;
-
- if !allow_expired && meta.expires() <= &UTC::now() {
- return Err(Error::ExpiredMetadata(role.clone()));
- }
-
- match out {
- &mut Some(ref mut out) => out.write_all(&buf)?,
- &mut None => (),
- };
-
- Ok(meta)
- }
-
- fn unverified_read_root(fetch_type: &FetchType,
- http_client: &Client,
- local_path: Option<&Path>)
- -> Result<(Option<TempFile>, RootMetadata), Error> {
- let (temp_file, buf): (Option<TempFile>, Vec<u8>) = match fetch_type {
- &FetchType::Cache(ref local_path) => {
- let path = local_path.join("metadata")
- .join("current")
- .join("root.json");
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
- file.read_to_end(&mut buf).map(|_| ())?;
- (None, buf)
- }
- &FetchType::File(ref path) => {
- let local_path = local_path.ok_or_else(|| {
- let msg = "Programming error. No local path supplied for remote file read";
- error!("{}", msg);
- Error::Generic(msg.to_string())
- })?;
- let dest_path = local_path.join("temp")
- .join(Uuid::new_v4().hyphenated().to_string());
-
- let src_path = path.join("root.json");
- fs::copy(src_path, dest_path.clone())?;
-
- let mut temp_file = TempFile::from_existing(dest_path)
- .map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
- temp_file.read_to_end(&mut buf).map(|_| ())?;
- temp_file.seek(SeekFrom::Start(0))
- .map_err(|e| Error::from_io(e, &path))?;
-
- (Some(temp_file), buf)
- }
- &FetchType::Http(ref url) => {
- let local_path = local_path.ok_or_else(|| {
- let msg = "Programming error. No local path supplied for remote HTTP read";
- error!("{}", msg);
- Error::Generic(msg.to_string())
- })?;
-
- let mut temp_file = TempFile::new(local_path.to_path_buf())?;
-
- let mut url = url.clone();
- {
- url.path_segments_mut()
- .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
- .push("root.json");
- }
- let mut resp = http::get(http_client, &url)?;
- let mut buf = Vec::new();
- resp.read_to_end(&mut buf).map(|_| ())?;
-
- temp_file.write_all(&buf).map(|_| ())?;
- temp_file.seek(SeekFrom::Start(0))?;
-
- (Some(temp_file), buf)
- }
- };
-
- let signed: SignedMetadata<Root> = json::from_slice(&buf)?;
- let root_str = signed.signed.to_string();
- Ok((temp_file, json::from_str(&root_str)?))
- }
-
- /// Read the root.json metadata and replace keys for the root role with the keys that are given
- /// as arguments to this function. This initial read is unverified in any way.
- fn read_root_with_keys(fetch_type: &FetchType,
- http_client: &Client,
- root_keys: &[Key])
- -> Result<RootMetadata, Error> {
- let buf: Vec<u8> = match fetch_type {
- &FetchType::Cache(ref local_path) => {
- let path = local_path.join("metadata")
- .join("archive")
- .join("1.root.json");
-
- debug!("Reading root.json from path: {:?}", path);
-
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
- file.read_to_end(&mut buf).map(|_| ())?;
- buf
- }
- &FetchType::File(ref path) => {
- let path = path.join("1.root.json");
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- let mut buf = Vec::new();
- file.read_to_end(&mut buf).map(|_| ())?;
- buf
- }
- &FetchType::Http(ref url) => {
- let mut url = url.clone();
- {
- url.path_segments_mut()
- .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
- .push("1.root.json");
- }
- let mut resp = http::get(http_client, &url)?;
- let mut buf = Vec::new();
- resp.read_to_end(&mut buf).map(|_| ())?;
- buf
- }
- };
-
- // TODO once serialize is implemented for all the types, don't use this
- // json manipulation mess here
- let mut signed = json::from_slice::<SignedMetadata<Root>>(&buf)?;
- if let json::Value::Object(ref mut object) = signed.signed {
- if let Some(&mut json::Value::Object(ref mut roles)) = object.get_mut("roles") {
- if let Some(&mut json::Value::Object(ref mut root)) = roles.get_mut("root") {
- if let Some(&mut json::Value::Array(ref mut key_ids)) = root.get_mut("keyids") {
- key_ids.clear();
- key_ids.extend(root_keys.iter()
- .map(|k| json::Value::String(k.value.key_id().0)));
- }
- }
- }
- }
-
- Ok(json::from_value(signed.signed)?)
- }
-
- fn verify_meta<R: RoleType>(signed: SignedMetadata<R>,
- role: &Role,
- threshold: i32,
- trusted_ids: &[KeyId],
- available_keys: &HashMap<KeyId, Key>)
- -> Result<(), Error> {
- let bytes =
- cjson::canonicalize(&signed.signed).map_err(|err| Error::CanonicalJsonError(err))?;
-
- let unique_count = signed.signatures
- .iter()
- .map(|s| &s.key_id)
- .collect::<HashSet<&KeyId>>()
- .len();
-
- if signed.signatures.len() != unique_count {
- return Err(Error::NonUniqueSignatures(role.clone()));
- }
-
- let keys = trusted_ids.iter()
- .map(|id| (id, available_keys.get(id)))
- .fold(HashMap::new(), |mut m, (id, k)| {
- if let Some(key) = k {
- m.insert(id, key);
- } else {
- debug!("unknown key id: {:?}", id);
- }
- m
- });
-
- if threshold <= 0 {
- return Err(Error::VerificationFailure("Threshold not >= 1".into()));
- }
-
- let mut valid_sigs = 0;
- for sig in signed.signatures.iter() {
- if let Some(key) = keys.get(&sig.key_id) {
- debug!("Verifying role {:?} with key ID {:?}",
- role,
- sig.key_id);
-
- match key.verify(&sig.method, &bytes, &sig.sig) {
- Ok(()) => {
- debug!("Good signature from key ID {:?}", sig.key_id);
- valid_sigs += 1;
- }
- Err(e) => warn!("Failed to verify with key ID {:?}: {:?}", &sig.key_id, e),
- }
- if valid_sigs == threshold {
- return Ok(());
- }
- }
- }
-
- info!("Threshold not met: {}/{}", valid_sigs, threshold);
- return Err(Error::UnmetThreshold(role.clone()));
- }
-
- /// Reads a target from local storage or fetches it from a remote repository. Verifies the
- /// target. Fails if the target is missing, or if the metadata chain that leads to it cannot
- /// be verified.
- // TODO ? stronger input type
- pub fn fetch_target(&self, target: &str) -> Result<PathBuf, Error> {
- let metadata_chain = match self.targets {
- Some(ref targets) => TargetPathIterator::new(&self, targets.clone(), target),
- None => return Err(Error::MissingMetadata(Role::Targets)),
- };
- for ref targets_meta in metadata_chain {
- let target_meta = match targets_meta.targets.get(target) {
- Some(meta) => meta,
- None => continue,
- };
-
- let (hash_alg, expected_hash): (&HashType, HashValue) = HashType::preferences().iter()
- .fold(None, |res, pref| {
- res.or_else(|| if let Some(hash) = target_meta.hashes.get(&pref) {
- Some((pref, hash.clone()))
- } else {
- None
- })
- })
- .ok_or_else(|| Error::NoSupportedHashAlgorithms)?;
-
- // TODO correctly split path
- let path = self.local_path.join("targets").join(util::url_path_to_os_path(target)?);
- info!("reading target from local path: {:?}", path);
-
- if path.exists() {
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
- Self::read_and_verify(&mut file,
- &mut None::<&mut File>,
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0)))?;
- let _ = file.seek(SeekFrom::Start(0))?;
- return Ok(path);
- } else {
- let mut temp_file = self.temp_file()?;
-
- match self.remote {
- RemoteRepo::File(ref path) => {
- let mut path = path.clone();
- path.extend(util::url_path_to_path_components(target)?);
- let mut file = File::open(path.clone()).map_err(|e| Error::from_io(e, &path))?;
-
- match Self::read_and_verify(&mut file,
- &mut Some(temp_file.file_mut()?),
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0))) {
- Ok(()) => {
- let mut storage_path = self.local_path.join("targets");
- storage_path.extend(util::url_path_to_path_components(target)?);
-
- {
- let parent = storage_path.parent()
- .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
-
- DirBuilder::new()
- .recursive(true)
- .create(parent)?;
- }
-
- temp_file.persist(&storage_path)?;
- return Ok(storage_path)
- }
- Err(e) => warn!("Error verifying target: {:?}", e),
- }
- }
- RemoteRepo::Http(ref url) => {
- let mut url = url.clone();
- {
- url.path_segments_mut()
- .map_err(|_| Error::Generic("URL path could not be mutated".to_string()))?
- .extend(util::url_path_to_path_components(&target)?);
- }
- let url = util::url_to_hyper_url(&url)?;
- let mut resp = http::get(&self.http_client, &url)?;
-
- match Self::read_and_verify(&mut resp,
- &mut Some(temp_file.file_mut()?),
- Some(target_meta.length),
- Some((&hash_alg, &expected_hash.0))) {
- Ok(()) => {
- // TODO this isn't windows friendly
- let mut storage_path = self.local_path.join("targets");
- storage_path.extend(util::url_path_to_path_components(target)?);
-
- {
- let parent = storage_path.parent()
- .ok_or_else(|| Error::Generic("Path had no parent".to_string()))?;
-
- DirBuilder::new()
- .recursive(true)
- .create(parent)?;
- }
-
- temp_file.persist(&storage_path)?;
-
- return Ok(storage_path)
- }
- Err(e) => warn!("Error verifying target: {:?}", e),
- }
- }
- }
- }
- };
-
- Err(Error::UnavailableTarget)
- }
-
- fn read_and_verify<R: Read, W: Write>(input: &mut R,
- output: &mut Option<W>,
- size: Option<i64>,
- hash_data: Option<(&HashType, &[u8])>)
- -> Result<(), Error> {
- let mut context = match hash_data {
- Some((&HashType::Sha512, _)) => Some(digest::Context::new(&SHA512)),
- Some((&HashType::Sha256, _)) => Some(digest::Context::new(&SHA256)),
- Some((&HashType::Unsupported(_), _)) => return Err(Error::NoSupportedHashAlgorithms),
- _ => None,
- };
-
- let mut buf = [0; 1024];
- let mut bytes_left = size;
-
- loop {
- match input.read(&mut buf) {
- Ok(read_bytes) => {
- if read_bytes == 0 {
- break;
- }
-
- match output {
- &mut Some(ref mut output) => output.write_all(&buf[0..read_bytes])?,
- &mut None => (),
- };
-
- match context {
- Some(ref mut c) => c.update(&buf[0..read_bytes]),
- None => (),
- };
-
- match bytes_left {
- Some(ref mut bytes_left) => {
- *bytes_left -= read_bytes as i64;
- if *bytes_left == 0 {
- break;
- } else if *bytes_left < 0 {
- return Err(Error::UnavailableTarget);
- }
- }
- None => (),
- };
- }
- e @ Err(_) => e.map(|_| ())?,
- }
- }
-
- let generated_hash = context.map(|c| c.finish());
-
- match (generated_hash, hash_data) {
- (Some(generated_hash), Some((_, expected_hash))) if generated_hash.as_ref() !=
- expected_hash => {
- Err(Error::UnavailableTarget)
- }
- // this should never happen, so err if it does for safety
- (Some(_), None) => {
- let msg = "Hash calculated when no expected hash supplied";
- error!("Programming error. Please report this as a bug: {}", msg);
- Err(Error::VerificationFailure(msg.to_string()))
- }
- // this should never happen, so err if it does for safety
- (None, Some(_)) => {
- let msg = "No hash calculated when expected hash supplied";
- error!("Programming error. Please report this as a bug: {}", msg);
- Err(Error::VerificationFailure(msg.to_string()))
- }
- (Some(_), Some(_)) |
- (None, None) => Ok(()),
- }
- }
-}
-
-
-/// The configuration used to initialize a `Tuf` struct.
-pub struct Config {
- remote: RemoteRepo,
- local_path: PathBuf,
- http_client: Client,
- init: bool,
-}
-
-impl Config {
- /// Create a new builder with the default configurations where applicable.
- pub fn build() -> ConfigBuilder {
- ConfigBuilder::new()
- }
-}
-
-
-/// Helper that constructs `Config`s and verifies the options.
-pub struct ConfigBuilder {
- remote: Option<RemoteRepo>,
- local_path: Option<PathBuf>,
- http_client: Option<Client>,
- init: bool,
-}
-
-impl ConfigBuilder {
- /// Create a new builder with the default configurations where applicable.
- pub fn new() -> Self {
- ConfigBuilder {
- remote: None,
- local_path: None,
- http_client: None,
- init: true,
- }
- }
-
- /// The remote TUF repo.
- pub fn remote(mut self, remote: RemoteRepo) -> Self {
- self.remote = Some(remote);
- self
- }
-
- /// The local path for metadata and target storage.
- pub fn local_path(mut self, local_path: PathBuf) -> Self {
- self.local_path = Some(local_path);
- self
- }
-
- /// The `hyper::client::Client` to use. Default: `Client::new()`.
- pub fn http_client(mut self, client: Client) -> Self {
- self.http_client = Some(client);
- self
- }
-
-    /// Whether or not to initialize the local directory structures.
- pub fn init(mut self, init: bool) -> Self {
- self.init = init;
- self
- }
-
- /// Verify the configuration.
- pub fn finish(self) -> Result<Config, Error> {
- let remote = self.remote
- .ok_or_else(|| Error::InvalidConfig("Remote repository was not set".to_string()))?;
-
- let local_path = self.local_path
- .ok_or_else(|| Error::InvalidConfig("Local path was not set".to_string()))?;
-
- Ok(Config {
- remote: remote,
- local_path: local_path,
- http_client: self.http_client.unwrap_or_else(|| Client::new()),
- init: self.init,
+ _interchange: PhantomData,
})
}
-}
-
-#[derive(Debug)]
-enum FetchType {
- Cache(PathBuf),
- File(PathBuf),
- Http(HyperUrl),
-}
-
-impl FetchType {
- fn is_cache(&self) -> bool {
- match self {
- &FetchType::Cache(_) => true,
- _ => false,
- }
+ /// An immutable reference to the root metadata.
+ pub fn root(&self) -> &RootMetadata {
+ &self.root
}
-}
-
-struct TargetPathIterator<'a> {
- tuf: &'a Tuf,
- targets: TargetsMetadata,
- target: &'a str,
- terminate: bool,
- targets_checked: bool,
- roles_index: usize,
- sub_iter: Option<Box<TargetPathIterator<'a>>>,
-}
-
-impl<'a> TargetPathIterator<'a> {
- fn new(tuf: &'a Tuf, targets: TargetsMetadata, target: &'a str) -> Self {
- TargetPathIterator {
- tuf: tuf,
- targets: targets,
- target: target,
- terminate: false,
- targets_checked: false,
- roles_index: 0,
- sub_iter: None,
- }
+ /// An immutable reference to the optional snapshot metadata.
+ pub fn snapshot(&self) -> Option<&SnapshotMetadata> {
+ self.snapshot.as_ref()
}
-}
-impl<'a> Iterator for TargetPathIterator<'a> {
- type Item = TargetsMetadata;
+ /// An immutable reference to the optional targets metadata.
+ pub fn targets(&self) -> Option<&TargetsMetadata> {
+ self.targets.as_ref()
+ }
- fn next(&mut self) -> Option<Self::Item> {
- if self.terminate {
- return None
- }
+ /// An immutable reference to the optional timestamp metadata.
+ pub fn timestamp(&self) -> Option<&TimestampMetadata> {
+ self.timestamp.as_ref()
+ }
- match self.targets.targets.get(self.target) {
- Some(_) if !self.targets_checked => {
- self.targets_checked = true;
- Some(self.targets.clone())
- },
- _ => {
- match self.targets.delegations {
- Some(ref delegations) => {
- for delegation in delegations.roles.iter().skip(self.roles_index) {
- if delegation.terminating {
- self.terminate = true;
- }
+ /// Return the list of all available targets.
+ pub fn available_targets(&self) -> Result<HashSet<&TargetPath>> {
+ let _ = self.safe_root_ref()?; // ensure root still valid
+ // TODO add delegations
+ Ok(
+ self.safe_targets_ref()?
+ .targets()
+ .keys()
+ .collect::<HashSet<&TargetPath>>(),
+ )
+ }
- self.roles_index += 1;
+ /// Verify and update the root metadata.
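+    ///
+    /// Returns `Ok(true)` if the root was updated and `Ok(false)` if the new metadata
+    /// carried the same version as the current root. A sketch (`new_root` is a
+    /// hypothetical freshly fetched `SignedMetadata<D, RootMetadata>`):
+    ///
+    /// ```ignore
+    /// if tuf.update_root(new_root)? {
+    ///     // the root was rotated, and all other trusted metadata was purged
+    /// }
+    /// ```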
+ pub fn update_root(&mut self, signed_root: SignedMetadata<D, RootMetadata>) -> Result<bool> {
+ signed_root.verify(
+ self.root.root().threshold(),
+ self.root.root().key_ids(),
+ self.root.keys(),
+ )?;
- let (version, length, hash_data) = match self.tuf.snapshot {
- Some(ref snapshot) => {
- match snapshot.meta.get(&format!("{}.json", delegation.name)) {
- Some(meta) => {
- let hash_data = match meta.hashes {
- Some(ref hashes) => {
- match HashType::preferences().iter()
- .fold(None, |res, pref| {
- res.or_else(|| if let Some(hash) = hashes.get(&pref) {
- Some((pref, hash))
- } else {
- None
- })
- }) {
- Some(pair) => Some(pair.clone()),
- None => {
- warn!("No suitable hash algorithms. Refusing to trust metadata: {:?}",
- delegation.name);
- continue
- }
- }
- },
- None => None,
- };
- (meta.version, meta.length, hash_data)
- },
- None => continue // TODO err msg
- }
- }
- None => continue // TODO err msg
- };
+ let root = D::deserialize::<RootMetadata>(signed_root.signed())?;
- // TODO extract hash/len from snapshot and use in verification
- if delegation.could_have_target(&self.target) {
- match Tuf::get_metadata::<Targets,
- TargetsMetadata,
- File>(&self.tuf.remote.as_fetch(),
- &self.tuf.http_client,
- &Role::TargetsDelegation(delegation.name.clone()),
- None,
- false,
- delegation.threshold,
- &delegation.key_ids,
- &delegations.keys,
- length,
- hash_data.map(|(a, h)| (a, &*h.0)),
- &mut None) {
- Ok(meta) => {
- if meta.version != version {
- warn!("The metadata for {:?} had version {} but snapshot reported {}",
- delegation.name, meta.version, version);
- continue
- }
-
- let mut iter = TargetPathIterator::new(&self.tuf,
- meta.clone(),
- self.target);
- let res = iter.next();
- if delegation.terminating && res.is_none() {
- return None
- } else if res.is_some() {
- self.sub_iter = Some(Box::new(iter));
- return res
- } else {
- continue
- }
- }
- Err(e) => warn!("Error fetching metadata: {:?}", e),
- }
- } else {
- continue
- }
- }
- return None
- },
- None => return None,
- }
+ match root.version() {
+ x if x == self.root.version() => {
+ info!(
+ "Attempted to update root to new metadata with the same version. \
+ Refusing to update."
+ );
+ return Ok(false);
}
+ x if x < self.root.version() => {
+ return Err(Error::VerificationFailure(format!(
+ "Attempted to roll back root metadata at version {} to {}.",
+ self.root.version(),
+ x
+ )))
+ }
+ _ => (),
+ }
+
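+        // Verify the new root a second time against its own key set so that a root
+        // rotation is cross-signed by both the old and the new root keys.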
+ let _ = signed_root.verify(
+ root.root().threshold(),
+ root.root().key_ids(),
+ root.keys(),
+ )?;
+
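+        // A newly trusted root invalidates all other previously trusted metadata.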
+ self.purge_metadata();
+
+ self.root = root;
+ Ok(true)
+ }
+
+ /// Verify and update the timestamp metadata.
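+    ///
+    /// Following the TUF update workflow, this is called after `update_root` and before
+    /// `update_snapshot`. A sketch (`signed_timestamp` is a hypothetical freshly fetched
+    /// `SignedMetadata<D, TimestampMetadata>`):
+    ///
+    /// ```ignore
+    /// let updated = tuf.update_timestamp(signed_timestamp)?;
+    /// ```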
+ pub fn update_timestamp(
+ &mut self,
+ signed_timestamp: SignedMetadata<D, TimestampMetadata>,
+ ) -> Result<bool> {
+ signed_timestamp.verify(
+ self.root.timestamp().threshold(),
+ self.root.timestamp().key_ids(),
+ self.root.keys(),
+ )?;
+
+ let current_version = self.timestamp.as_ref().map(|t| t.version()).unwrap_or(0);
+ let timestamp: TimestampMetadata = D::deserialize(&signed_timestamp.signed())?;
+
+ if timestamp.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Timestamp));
+ }
+
+ if timestamp.version() < current_version {
+ Err(Error::VerificationFailure(format!(
+ "Attempted to roll back timestamp metadata at version {} to {}.",
+ current_version,
+ timestamp.version()
+ )))
+ } else if timestamp.version() == current_version {
+ Ok(false)
+ } else {
+ self.timestamp = Some(timestamp);
+ Ok(true)
+ }
+ }
+
+ /// Verify and update the snapshot metadata.
+ pub fn update_snapshot(
+ &mut self,
+ signed_snapshot: SignedMetadata<D, SnapshotMetadata>,
+ ) -> Result<bool> {
+ let snapshot = {
+ let root = self.safe_root_ref()?;
+ let timestamp = self.safe_timestamp_ref()?;
+ let snapshot_description = timestamp
+ .meta()
+ .get(&MetadataPath::from_role(&Role::Snapshot))
+ .ok_or_else(|| {
+ Error::VerificationFailure(
+ "Timestamp metadata had no description of the snapshot metadata".into(),
+ )
+ })?;
+
+ let current_version = self.snapshot.as_ref().map(|t| t.version()).unwrap_or(0);
+
+ if snapshot_description.version() < current_version {
+ return Err(Error::VerificationFailure(format!(
+ "Attempted to roll back snapshot metadata at version {} to {}.",
+ current_version,
+ snapshot_description.version()
+ )));
+ } else if snapshot_description.version() == current_version {
+ return Ok(false);
+ }
+
+ signed_snapshot.verify(
+ root.snapshot().threshold(),
+ root.snapshot().key_ids(),
+ root.keys(),
+ )?;
+
+ let snapshot: SnapshotMetadata = D::deserialize(&signed_snapshot.signed())?;
+
+ if snapshot.version() != snapshot_description.version() {
+ return Err(Error::VerificationFailure(format!(
+ "The timestamp metadata reported that the snapshot metadata should be at \
+ version {} but version {} was found instead.",
+ snapshot_description.version(),
+ snapshot.version()
+ )));
+ }
+
+ if snapshot.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Snapshot));
+ }
+
+ snapshot
+ };
+
+ self.snapshot = Some(snapshot);
+ Ok(true)
+ }
+
+ /// Verify and update the targets metadata.
+ pub fn update_targets(
+ &mut self,
+ signed_targets: SignedMetadata<D, TargetsMetadata>,
+ ) -> Result<bool> {
+ let targets = {
+ let root = self.safe_root_ref()?;
+ let snapshot = self.safe_snapshot_ref()?;
+ let targets_description = snapshot
+ .meta()
+ .get(&MetadataPath::from_role(&Role::Targets))
+ .ok_or_else(|| {
+ Error::VerificationFailure(
+ "Snapshot metadata had no description of the targets metadata".into(),
+ )
+ })?;
+
+ let current_version = self.targets.as_ref().map(|t| t.version()).unwrap_or(0);
+
+ if targets_description.version() < current_version {
+ return Err(Error::VerificationFailure(format!(
+ "Attempted to roll back targets metadata at version {} to {}.",
+ current_version,
+ targets_description.version()
+ )));
+ } else if targets_description.version() == current_version {
+ return Ok(false);
+ }
+
+ signed_targets.verify(
+ root.targets().threshold(),
+ root.targets().key_ids(),
+ root.keys(),
+ )?;
+
+ let targets: TargetsMetadata = D::deserialize(&signed_targets.signed())?;
+
+ if targets.version() != targets_description.version() {
+ return Err(Error::VerificationFailure(format!(
+                "The snapshot metadata reported that the targets metadata should be at \
+ version {} but version {} was found instead.",
+ targets_description.version(),
+ targets.version()
+ )));
+ }
+
+ if targets.expires() <= &Utc::now() {
+                return Err(Error::ExpiredMetadata(Role::Targets));
+ }
+ targets
+ };
+
+ self.targets = Some(targets);
+ Ok(true)
+ }
+
+ /// Get a reference to the description needed to verify the target defined by the given
+ /// `TargetPath`. Returns an `Error` if the target is not defined in the trusted metadata. This
+ /// may mean the target exists somewhere in the metadata, but the chain of trust to that target
+ /// may be invalid or incomplete.
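+    ///
+    /// A sketch of a lookup (the target path is borrowed from `tests/simple_example.rs`):
+    ///
+    /// ```ignore
+    /// let description = tuf.target_description(&TargetPath::new("grendel".into())?)?;
+    /// ```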
+ pub fn target_description(&self, target_path: &TargetPath) -> Result<&TargetDescription> {
+ let _ = self.safe_root_ref()?;
+ let _ = self.safe_snapshot_ref()?;
+ let targets = self.safe_targets_ref()?;
+
+ targets.targets().get(target_path).ok_or(
+ Error::TargetUnavailable,
+ )
+
+ // TODO include searching delegations
+ }
+
+ fn purge_metadata(&mut self) {
+ self.snapshot = None;
+ self.targets = None;
+ self.timestamp = None;
+ // TODO include delegations
+ }
+
+ fn safe_root_ref(&self) -> Result<&RootMetadata> {
+ if self.root.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Root));
+ }
+ Ok(&self.root)
+ }
+
+ fn safe_snapshot_ref(&self) -> Result<&SnapshotMetadata> {
+ match &self.snapshot {
+ &Some(ref snapshot) => {
+ if snapshot.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Snapshot));
+ }
+ Ok(snapshot)
+ }
+ &None => Err(Error::MissingMetadata(Role::Snapshot)),
+ }
+ }
+
+ fn safe_targets_ref(&self) -> Result<&TargetsMetadata> {
+ match &self.targets {
+ &Some(ref targets) => {
+ if targets.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Targets));
+ }
+ Ok(targets)
+ }
+ &None => Err(Error::MissingMetadata(Role::Targets)),
+ }
+ }
+ fn safe_timestamp_ref(&self) -> Result<&TimestampMetadata> {
+ match &self.timestamp {
+ &Some(ref timestamp) => {
+ if timestamp.expires() <= &Utc::now() {
+ return Err(Error::ExpiredMetadata(Role::Timestamp));
+ }
+ Ok(timestamp)
+ }
+ &None => Err(Error::MissingMetadata(Role::Timestamp)),
}
}
}
diff --git a/src/util.rs b/src/util.rs
deleted file mode 100644
index 85991a5..0000000
--- a/src/util.rs
+++ /dev/null
@@ -1,167 +0,0 @@
-use hyper;
-use std::fs::{self, File};
-use std::io::{self, Read, Write, Seek, SeekFrom};
-use std::path::{Path, PathBuf};
-use url::Url;
-use url::percent_encoding::percent_decode;
-use uuid::Uuid;
-
-use error::Error;
-
-/// Converts a URL string (without scheme) into an OS specific path.
-pub fn url_path_to_os_path(url_path: &str) -> Result<PathBuf, Error> {
-    let url_path = if cfg!(target_os = "windows") {
- url_path.replace("/", r"\")
- } else {
- url_path.to_string()
- };
-
- let url_path = percent_decode(url_path.as_bytes())
- .decode_utf8()
- .map_err(|e| Error::Generic(format!("{}", e)))?
- .into_owned();
-
- Ok(Path::new(&url_path).to_path_buf())
-}
-
-pub fn url_path_to_path_components(url_path: &str) -> Result<Vec<String>, Error> {
- let mut out = Vec::new();
- for component in url_path.split("/") {
- let component = percent_decode(component.as_bytes())
- .decode_utf8()
- .map_err(|e| Error::Generic(format!("Path component not utf-8: {:?}", e)))?
- .into_owned();
- out.push(component);
- }
- Ok(out)
-}
-
-/// Converts a `url::Url` into a `hyper::Url`.
-pub fn url_to_hyper_url(url: &Url) -> Result<hyper::Url, Error> {
- Ok(hyper::Url::parse(url.as_str())?)
-}
-
-
-#[derive(Debug)]
-struct TempFileInner {
- path: PathBuf,
- file: File,
-}
-
-#[derive(Debug)]
-pub struct TempFile(Option<TempFileInner>);
-
-impl TempFile {
- pub fn new(prefix: PathBuf) -> Result<Self, io::Error> {
- let path = prefix.join(Uuid::new_v4().hyphenated().to_string());
- Ok(TempFile(Some(TempFileInner {
- path: path.clone(),
- file: File::create(path)?,
- })))
- }
-
- pub fn from_existing(path: PathBuf) -> Result<Self, io::Error> {
- Ok(TempFile(Some( TempFileInner {
- path: path.clone(),
- file: File::open(path)?,
- })))
- }
-
- pub fn file_mut(&mut self) -> Result<&mut File, io::Error> {
- match self.0 {
- Some(ref mut inner) => Ok(&mut inner.file),
- None => Err(io::Error::new(io::ErrorKind::Other, "invalid TempFile reference"))
- }
- }
-
- pub fn persist(mut self, dest: &Path) -> Result<(), io::Error> {
- match self.0.take() {
- Some(inner) => fs::rename(inner.path, dest),
- None => Err(io::Error::new(io::ErrorKind::Other, "invalid TempFile reference")),
- }
- }
-}
-
-impl Write for TempFile {
- fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
- self.file_mut()?.write(buf)
- }
-
- fn flush(&mut self) -> Result<(), io::Error> {
- self.file_mut()?.flush()
- }
-}
-
-impl Read for TempFile {
- fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
- self.file_mut()?.read(buf)
- }
-}
-
-impl Seek for TempFile {
- fn seek(&mut self, pos: SeekFrom) -> Result<u64, io::Error> {
- self.file_mut()?.seek(pos)
- }
-}
-
-impl Drop for TempFile {
- fn drop(&mut self) {
- match self.0.take() {
- Some(inner) => {
- drop(inner.file);
- match fs::remove_file(inner.path) {
- Ok(()) => (),
- Err(e) => warn!("Failed to delete tempfile: {:?}", e),
- }
- },
- None => (),
- }
- }
-}
-
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[test]
- #[cfg(not(target_os = "windows"))]
- fn test_url_path_to_os_path_nix() {
- let path = "/tmp/test";
- assert_eq!(url_path_to_os_path(path), Ok(PathBuf::from("/tmp/test")));
- }
-
- #[test]
- #[cfg(not(target_os = "windows"))]
- fn test_url_path_to_os_path_percent_nix() {
- let path = "/tmp/test%20stuff";
- assert_eq!(url_path_to_os_path(path),
- Ok(PathBuf::from("/tmp/test stuff")));
- }
-
- #[test]
- #[cfg(target_os = "windows")]
- fn test_url_path_to_os_path_win() {
- let path = r"C:/tmp/test";
- assert_eq!(url_path_to_os_path(path), Ok(PathBuf::from(r"C:\tmp\test")));
- }
-
- #[test]
- #[cfg(target_os = "windows")]
- fn test_url_path_to_os_path_spaces_win() {
- let path = r"C:/tmp/test%20stuff";
- assert_eq!(url_path_to_os_path(path),
- Ok(PathBuf::from(r"C:\tmp\test stuff")));
- }
-
- #[test]
- fn test_url_path_to_path_components() {
- let path = "test/foo";
- assert_eq!(url_path_to_path_components(path),
- Ok(vec!["test".into(), "foo".into()]));
-
- let path = "test/foo%20bar";
- assert_eq!(url_path_to_path_components(path),
- Ok(vec!["test".into(), "foo bar".into()]));
- }
-}
diff --git a/tests/ed25519/ed25519-1.pk8.der b/tests/ed25519/ed25519-1.pk8.der
new file mode 100644
index 0000000..ea5dda6
--- /dev/null
+++ b/tests/ed25519/ed25519-1.pk8.der
Binary files differ
diff --git a/tests/ed25519/ed25519-1.spki.der b/tests/ed25519/ed25519-1.spki.der
new file mode 100644
index 0000000..cd85858
--- /dev/null
+++ b/tests/ed25519/ed25519-1.spki.der
@@ -0,0 +1 @@
+MCwwBwYDK2VwBQADIQDrisJrXJ7wJ5474+giYqk7zhb+WO5CJQDTjK9GHGWjtg==
diff --git a/tests/ed25519/ed25519-2.pk8.der b/tests/ed25519/ed25519-2.pk8.der
new file mode 100644
index 0000000..2e0687f
--- /dev/null
+++ b/tests/ed25519/ed25519-2.pk8.der
Binary files differ
diff --git a/tests/ed25519/ed25519-3.pk8.der b/tests/ed25519/ed25519-3.pk8.der
new file mode 100644
index 0000000..1b4cbed
--- /dev/null
+++ b/tests/ed25519/ed25519-3.pk8.der
Binary files differ
diff --git a/tests/ed25519/ed25519-4.pk8.der b/tests/ed25519/ed25519-4.pk8.der
new file mode 100644
index 0000000..3e458e7
--- /dev/null
+++ b/tests/ed25519/ed25519-4.pk8.der
Binary files differ
diff --git a/tests/rsa/gen.sh b/tests/rsa/gen.sh
new file mode 100755
index 0000000..b6534f1
--- /dev/null
+++ b/tests/rsa/gen.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+set -eux
+
+cd "$(dirname "$0")"
+
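+# For each key size, emit the private key as PKCS#8 DER (*.pk8.der) and the public key
+# as both SubjectPublicKeyInfo DER (*.spki.der) and PKCS#1 DER (*.pkcs1.der).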
+for key_size in 2048 4096; do
+ key="rsa-$key_size"
+ pk8="$key.pk8.der"
+ spki="$key.spki.der"
+ pkcs1="$key.pkcs1.der"
+ key="$key.der"
+
+ if [ ! -f "$key" ]; then
+ openssl genpkey -algorithm RSA \
+ -pkeyopt "rsa_keygen_bits:$key_size" \
+ -pkeyopt rsa_keygen_pubexp:65537 \
+ -outform der \
+ -out "$key"
+ fi
+
+ openssl rsa -in "$key" \
+ -inform der \
+ -RSAPublicKey_out \
+ -outform der \
+ -out "$pkcs1"
+
+ openssl rsa -in "$key" \
+ -inform der \
+ -pubout \
+ -outform der \
+ -out "$spki"
+
+ openssl pkcs8 -topk8 \
+ -inform der \
+ -in "$key" \
+ -outform der \
+ -out "$pk8" \
+ -nocrypt
+done
diff --git a/tests/rsa/pkcs1-1.pub b/tests/rsa/pkcs1-1.pub
deleted file mode 100644
index 7ca5280..0000000
--- a/tests/rsa/pkcs1-1.pub
+++ /dev/null
@@ -1,8 +0,0 @@
------BEGIN RSA PUBLIC KEY-----
-MIIBCgKCAQEAndqFFxVnq7FjihMB7sBjfMMIS2112HB0PauY7wDQ8gTnfbWkCOOQ
-2kvh0f8Ph41rQ1iZiPaZq+eQ+yqhPCsPJKSeq9H8oOCon4JI5afSTUTkC0NmA1SN
-3cMMJvUgNr+uBWOc+IHr90o6xBTuzplmnzzjGCGNaOMLt7P3yuF8q9UXb1DBOBvq
-YutGB5UB+9Ma0K4c5lMnUy0IVb6j1tECFKSi4RTepA5UAOV5LE2T6Gs89kRjhTxv
-VsKAAj92z3VGX5pJR9zm6ZrAbjSen9LfvFWgd2Hz1Qy4d9Jm0iSaJb5VG06/O4JM
-T1FXfIv2OP5NlzKoyDxp5ZEVLHWNksHHawIDAQAB
------END RSA PUBLIC KEY-----
diff --git a/tests/rsa/pkcs1-2.pub b/tests/rsa/pkcs1-2.pub
deleted file mode 100644
index 47701da..0000000
--- a/tests/rsa/pkcs1-2.pub
+++ /dev/null
@@ -1,13 +0,0 @@
------BEGIN RSA PUBLIC KEY-----
-MIICCgKCAgEA6si2eKLhg5EwZajX8b4PpLmPnnhJElIEadkb7nJnW4kCk5QO5TwH
-tfRS7snPULxH6gDiTMKBMRMNOw9wXOwNZ/DTLeaKs7ScJS24tLnjwI8dEj5h0N8n
-eBvkFrStEFoNxy0unRJHWnnc+j3TdCWf5k1VbZWS5fREECtq767fDBs/l55iI176
-6x+vSPbdt0mXsTiFLf4eT1ISskvToG7/K3TkTyTUYLVdCfOYnWgsbgjiemjwzoyW
-CgxHQGSh6CaGWccvczp1WNL757VP/HzpsmVE/mr0T6V3k/lGAp0mEbjyJKcnk2E5
-LaLjSqfk+b4yJgmQXP68HOLRWL57bUgZnx85GpHpQfE0MX0OmuwQsbpQxxiErSax
-3Lrh3sWsa0dUIggb0/UWKir6omoMwjYcqR4Sfp2vI0yjusLah3nF6bV6b+zgA/Zy
-+tffsW7ntxjOY/d8hQRJtKmEJ54XJSCEOay6E1NfCj/G9UGNuKt/rnhWghkxoOuG
-lDAo5O4UCjwyDavinAb/3E2Q6tSZJR0g6z7mNBDZewmViHB+5b9demT/xaloyxe0
-QrevtTlh/NbytEki0NQgXE/6sXHd/R0Y3wqDtggWZbeWTqSD+HiyglPzgLULpXMP
-NhO05bhlbHnP4srkacjBFSfQYcl7odkQZeqFbCk+k0/cYYQ+6bd2IpUCAwEAAQ==
------END RSA PUBLIC KEY-----
diff --git a/tests/rsa/rsa-2048 b/tests/rsa/rsa-2048
new file mode 100644
index 0000000..a2eec5b
--- /dev/null
+++ b/tests/rsa/rsa-2048
Binary files differ
diff --git a/tests/rsa/rsa-2048.der b/tests/rsa/rsa-2048.der
new file mode 100644
index 0000000..a932df0
--- /dev/null
+++ b/tests/rsa/rsa-2048.der
Binary files differ
diff --git a/tests/rsa/rsa-2048.pk8.der b/tests/rsa/rsa-2048.pk8.der
new file mode 100644
index 0000000..728aaf3
--- /dev/null
+++ b/tests/rsa/rsa-2048.pk8.der
Binary files differ
diff --git a/tests/rsa/rsa-2048.pkcs1.der b/tests/rsa/rsa-2048.pkcs1.der
new file mode 100644
index 0000000..9793d7e
--- /dev/null
+++ b/tests/rsa/rsa-2048.pkcs1.der
Binary files differ
diff --git a/tests/rsa/rsa-2048.spki.der b/tests/rsa/rsa-2048.spki.der
new file mode 100644
index 0000000..f57e69d
--- /dev/null
+++ b/tests/rsa/rsa-2048.spki.der
Binary files differ
diff --git a/tests/rsa/rsa-4096.der b/tests/rsa/rsa-4096.der
new file mode 100644
index 0000000..44cbedb
--- /dev/null
+++ b/tests/rsa/rsa-4096.der
Binary files differ
diff --git a/tests/rsa/rsa-4096.pk8.der b/tests/rsa/rsa-4096.pk8.der
new file mode 100644
index 0000000..35a8247
--- /dev/null
+++ b/tests/rsa/rsa-4096.pk8.der
Binary files differ
diff --git a/tests/rsa/rsa-4096.pkcs1.der b/tests/rsa/rsa-4096.pkcs1.der
new file mode 100644
index 0000000..9295614
--- /dev/null
+++ b/tests/rsa/rsa-4096.pkcs1.der
Binary files differ
diff --git a/tests/rsa/rsa-4096.spki.der b/tests/rsa/rsa-4096.spki.der
new file mode 100644
index 0000000..2503d55
--- /dev/null
+++ b/tests/rsa/rsa-4096.spki.der
Binary files differ
diff --git a/tests/rsa/spki-1.pub b/tests/rsa/spki-1.pub
deleted file mode 100644
index 44b0ef9..0000000
--- a/tests/rsa/spki-1.pub
+++ /dev/null
@@ -1,9 +0,0 @@
------BEGIN PUBLIC KEY-----
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAndqFFxVnq7FjihMB7sBj
-fMMIS2112HB0PauY7wDQ8gTnfbWkCOOQ2kvh0f8Ph41rQ1iZiPaZq+eQ+yqhPCsP
-JKSeq9H8oOCon4JI5afSTUTkC0NmA1SN3cMMJvUgNr+uBWOc+IHr90o6xBTuzplm
-nzzjGCGNaOMLt7P3yuF8q9UXb1DBOBvqYutGB5UB+9Ma0K4c5lMnUy0IVb6j1tEC
-FKSi4RTepA5UAOV5LE2T6Gs89kRjhTxvVsKAAj92z3VGX5pJR9zm6ZrAbjSen9Lf
-vFWgd2Hz1Qy4d9Jm0iSaJb5VG06/O4JMT1FXfIv2OP5NlzKoyDxp5ZEVLHWNksHH
-awIDAQAB
------END PUBLIC KEY-----
diff --git a/tests/rsa/spki-2.pub b/tests/rsa/spki-2.pub
deleted file mode 100644
index e4d767e..0000000
--- a/tests/rsa/spki-2.pub
+++ /dev/null
@@ -1,14 +0,0 @@
------BEGIN PUBLIC KEY-----
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA6si2eKLhg5EwZajX8b4P
-pLmPnnhJElIEadkb7nJnW4kCk5QO5TwHtfRS7snPULxH6gDiTMKBMRMNOw9wXOwN
-Z/DTLeaKs7ScJS24tLnjwI8dEj5h0N8neBvkFrStEFoNxy0unRJHWnnc+j3TdCWf
-5k1VbZWS5fREECtq767fDBs/l55iI1766x+vSPbdt0mXsTiFLf4eT1ISskvToG7/
-K3TkTyTUYLVdCfOYnWgsbgjiemjwzoyWCgxHQGSh6CaGWccvczp1WNL757VP/Hzp
-smVE/mr0T6V3k/lGAp0mEbjyJKcnk2E5LaLjSqfk+b4yJgmQXP68HOLRWL57bUgZ
-nx85GpHpQfE0MX0OmuwQsbpQxxiErSax3Lrh3sWsa0dUIggb0/UWKir6omoMwjYc
-qR4Sfp2vI0yjusLah3nF6bV6b+zgA/Zy+tffsW7ntxjOY/d8hQRJtKmEJ54XJSCE
-Oay6E1NfCj/G9UGNuKt/rnhWghkxoOuGlDAo5O4UCjwyDavinAb/3E2Q6tSZJR0g
-6z7mNBDZewmViHB+5b9demT/xaloyxe0QrevtTlh/NbytEki0NQgXE/6sXHd/R0Y
-3wqDtggWZbeWTqSD+HiyglPzgLULpXMPNhO05bhlbHnP4srkacjBFSfQYcl7odkQ
-ZeqFbCk+k0/cYYQ+6bd2IpUCAwEAAQ==
------END PUBLIC KEY-----
diff --git a/tests/simple_example.rs b/tests/simple_example.rs
new file mode 100644
index 0000000..cc8b691
--- /dev/null
+++ b/tests/simple_example.rs
@@ -0,0 +1,199 @@
+extern crate chrono;
+extern crate tuf;
+
+use chrono::prelude::*;
+use chrono::offset::Utc;
+use std::collections::{HashSet, HashMap};
+use tuf::{Tuf, Error};
+use tuf::client::{Client, Config};
+use tuf::crypto::{PrivateKey, SignatureScheme, KeyId};
+use tuf::interchange::JsonDataInterchange;
+use tuf::metadata::{RoleDefinition, RootMetadata, Role, MetadataVersion, MetadataPath,
+ SignedMetadata, TargetDescription, TargetPath, TargetsMetadata,
+ MetadataDescription, SnapshotMetadata, TimestampMetadata};
+use tuf::repository::{EphemeralRepository, Repository};
+
+// Ironically, this is far from simple, but it's as simple as it can be made.
+
+const ED25519_1_PK8: &'static [u8] = include_bytes!("./ed25519/ed25519-1.pk8.der");
+const ED25519_2_PK8: &'static [u8] = include_bytes!("./ed25519/ed25519-2.pk8.der");
+const ED25519_3_PK8: &'static [u8] = include_bytes!("./ed25519/ed25519-3.pk8.der");
+const ED25519_4_PK8: &'static [u8] = include_bytes!("./ed25519/ed25519-4.pk8.der");
+
+#[test]
+fn main() {
+ let mut remote = EphemeralRepository::<JsonDataInterchange>::new();
+ let root_key_ids = init_server(&mut remote).unwrap();
+ init_client(root_key_ids, remote).unwrap();
+}
+
+fn init_client(
+ root_key_ids: Vec<KeyId>,
+ mut remote: EphemeralRepository<JsonDataInterchange>,
+) -> Result<(), Error> {
+ let local = EphemeralRepository::<JsonDataInterchange>::new();
+ let config = Config::build().finish()?;
+ let root = remote.fetch_metadata(
+ &Role::Root,
+ &MetadataPath::from_role(&Role::Root),
+ &MetadataVersion::None,
+ config.max_root_size(),
+ None,
+ )?;
+
+ let tuf = Tuf::<JsonDataInterchange>::from_root_pinned(root, &root_key_ids)?;
+ let mut client = Client::new(tuf, config, local, remote)?;
+ match client.update_local() {
+ Ok(_) => (),
+ Err(e) => println!("{:?}", e),
+ }
+ let _ = client.update_remote()?;
+ client.fetch_target(&TargetPath::new("grendel".into())?)
+}
+
+fn init_server(remote: &mut EphemeralRepository<JsonDataInterchange>) -> Result<Vec<KeyId>, Error> {
+ // in real life, you wouldn't want these keys on the same machine ever
+ let root_key = PrivateKey::from_pkcs8(ED25519_1_PK8)?;
+ let snapshot_key = PrivateKey::from_pkcs8(ED25519_2_PK8)?;
+ let targets_key = PrivateKey::from_pkcs8(ED25519_3_PK8)?;
+ let timestamp_key = PrivateKey::from_pkcs8(ED25519_4_PK8)?;
+
+ //// build the root ////
+
+ let keys = vec![
+ root_key.public().clone(),
+ snapshot_key.public().clone(),
+ targets_key.public().clone(),
+ timestamp_key.public().clone(),
+ ];
+
+ let mut key_ids = HashSet::new();
+ key_ids.insert(root_key.key_id().clone());
+ let root_def = RoleDefinition::new(1, key_ids)?;
+
+ let mut key_ids = HashSet::new();
+ key_ids.insert(snapshot_key.key_id().clone());
+ let snapshot_def = RoleDefinition::new(1, key_ids)?;
+
+ let mut key_ids = HashSet::new();
+ key_ids.insert(targets_key.key_id().clone());
+ let targets_def = RoleDefinition::new(1, key_ids)?;
+
+ let mut key_ids = HashSet::new();
+ key_ids.insert(timestamp_key.key_id().clone());
+ let timestamp_def = RoleDefinition::new(1, key_ids)?;
+
+ let root = RootMetadata::new(
+ 1,
+ Utc.ymd(2038, 1, 1).and_hms(0, 0, 0),
+ false,
+ keys,
+ root_def,
+ snapshot_def,
+ targets_def,
+ timestamp_def,
+ )?;
+
+ let signed = SignedMetadata::<JsonDataInterchange, RootMetadata>::new(
+ &root,
+ &root_key,
+ SignatureScheme::Ed25519,
+ )?;
+
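+    // Store each piece of metadata under both its versioned name
+    // (MetadataVersion::Number(1)) and the unversioned "latest" name
+    // (MetadataVersion::None) so a client can fetch either form.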
+ remote.store_metadata(
+ &Role::Root,
+ &MetadataPath::new("root".into())?,
+ &MetadataVersion::Number(1),
+ &signed,
+ )?;
+ remote.store_metadata(
+ &Role::Root,
+ &MetadataPath::new("root".into())?,
+ &MetadataVersion::None,
+ &signed,
+ )?;
+
+ //// build the targets ////
+
+ let target_file: &[u8] = b"things fade, alternatives exclude";
+ let target_path = TargetPath::new("grendel".into())?;
+ let target_description = TargetDescription::from_reader(target_file)?;
+ let _ = remote.store_target(target_file, &target_path, &target_description);
+
+ let mut target_map = HashMap::new();
+ let _ = target_map.insert(target_path, target_description);
+ let targets = TargetsMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), target_map)?;
+
+ let signed = SignedMetadata::<JsonDataInterchange, TargetsMetadata>::new(
+ &targets,
+ &targets_key,
+ SignatureScheme::Ed25519,
+ )?;
+
+ remote.store_metadata(
+ &Role::Targets,
+ &MetadataPath::new("targets".into())?,
+ &MetadataVersion::Number(1),
+ &signed,
+ )?;
+ remote.store_metadata(
+ &Role::Targets,
+ &MetadataPath::new("targets".into())?,
+ &MetadataVersion::None,
+ &signed,
+ )?;
+
+ //// build the snapshot ////
+ let mut meta_map = HashMap::new();
+ let path = MetadataPath::new("targets".into())?;
+ let desc = MetadataDescription::new(1)?;
+ let _ = meta_map.insert(path, desc);
+ let snapshot = SnapshotMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)?;
+
+ let signed = SignedMetadata::<JsonDataInterchange, SnapshotMetadata>::new(
+ &snapshot,
+ &snapshot_key,
+ SignatureScheme::Ed25519,
+ )?;
+
+ remote.store_metadata(
+ &Role::Snapshot,
+ &MetadataPath::new("snapshot".into())?,
+ &MetadataVersion::Number(1),
+ &signed,
+ )?;
+ remote.store_metadata(
+ &Role::Snapshot,
+ &MetadataPath::new("snapshot".into())?,
+ &MetadataVersion::None,
+ &signed,
+ )?;
+
+ //// build the timestamp ////
+ let mut meta_map = HashMap::new();
+ let path = MetadataPath::new("snapshot".into())?;
+ let desc = MetadataDescription::new(1)?;
+ let _ = meta_map.insert(path, desc);
+ let timestamp = TimestampMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)?;
+
+ let signed = SignedMetadata::<JsonDataInterchange, TimestampMetadata>::new(
+        &timestamp,
+        &timestamp_key,
+ SignatureScheme::Ed25519,
+ )?;
+
+ remote.store_metadata(
+ &Role::Timestamp,
+ &MetadataPath::new("timestamp".into())?,
+ &MetadataVersion::Number(1),
+ &signed,
+ )?;
+ remote.store_metadata(
+ &Role::Timestamp,
+ &MetadataPath::new("timestamp".into())?,
+ &MetadataVersion::None,
+ &signed,
+ )?;
+
+ Ok(vec![root_key.key_id().clone()])
+}
diff --git a/tests/tuf-test-vectors b/tests/tuf-test-vectors
deleted file mode 160000
index 6e17614..0000000
--- a/tests/tuf-test-vectors
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 6e176149be1b0b05df567658173d8149aa1df57e
diff --git a/tests/vectors.rs b/tests/vectors.rs
deleted file mode 100644
index 95a5215..0000000
--- a/tests/vectors.rs
+++ /dev/null
@@ -1,339 +0,0 @@
-extern crate data_encoding;
-extern crate pem;
-extern crate serde;
-#[macro_use]
-extern crate serde_derive;
-extern crate serde_json as json;
-extern crate tempdir;
-extern crate tuf;
-extern crate url;
-
-use data_encoding::HEXLOWER;
-use std::fs::{self, File, DirEntry};
-use std::io::{self, Read};
-use std::path::{PathBuf, Path};
-use std::str;
-use tempdir::TempDir;
-use tuf::{Tuf, Config, Error, RemoteRepo};
-use tuf::meta::{Key, KeyValue, KeyType};
-use url::Url;
-
-
-fn load_vector_meta() -> String {
- let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
- .join("tests")
- .join("tuf-test-vectors")
- .join("tuf")
- .join("vector-meta.json");
- let mut file = File::open(path).expect("couldn't open vector meta");
- let mut buf = String::new();
- file.read_to_string(&mut buf).expect("couldn't read vector meta");
- buf
-}
-
-#[derive(Deserialize)]
-struct VectorMeta {
- vectors: Vec<VectorMetaEntry>,
-}
-
-#[derive(Deserialize)]
-struct VectorMetaEntry {
- repo: String,
- error: Option<String>,
- is_success: bool,
- root_keys: Vec<RootKeyData>,
-}
-
-#[derive(Deserialize)]
-struct RootKeyData {
- path: String,
- #[serde(rename = "type")]
- typ: String,
-}
-
-enum TestType {
- File,
- Http
-}
-
-fn ensure_empty(path: &Path) {
- if !path.is_dir() {
- panic!("Path wasn't a dir: {:?}", path)
- }
-
- let res = fs::read_dir(path).expect("couldn't read dir").collect::<Vec<io::Result<DirEntry>>>();
- if !res.is_empty() {
- panic!("Temp dir not empty: {:?}", res)
- }
- if !res.iter().all(|x| x.is_ok()) {
- panic!("Temp dir errors: {:?}", res)
- }
-}
-
-fn run_test_vector(test_path: &str, test_type: TestType, pin_root_keys: bool) {
- let temp_dir = TempDir::new("rust-tuf").expect("couldn't make temp dir");
- let temp_path = temp_dir.into_path();
-
- println!("Temp dir is: {:?}", temp_path);
-
- let vector_meta: VectorMeta = json::from_str(&load_vector_meta())
-        .expect("couldn't deserialize meta");
-
- let test_vector = vector_meta.vectors
- .iter()
- .filter(|v| v.repo == test_path)
- .collect::<Vec<&VectorMetaEntry>>()
- .pop()
- .expect(format!("No repo named {}", test_path).as_str());
-
- let vector_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
- .join("tests")
- .join("tuf-test-vectors")
- .join("tuf")
- .join(test_vector.repo.clone());
-
- println!("The test vector path is: {}",
- vector_path.to_string_lossy().into_owned());
-
- let config = match test_type {
- TestType::File => Config::build()
- .remote(RemoteRepo::File(vector_path.join("repo"))),
- TestType::Http => Config::build()
- .remote(RemoteRepo::Http(Url::parse(
- &format!("http://localhost:8080/{}/repo", test_path)).expect("bad url"))),
- }.local_path(temp_path.clone())
- .finish()
- .expect("bad config");
-
- let tuf = if pin_root_keys {
- let root_keys = test_vector.root_keys
- .iter()
- .map(|k| {
- let file_path = vector_path.join("keys").join(k.path.clone());
- let mut file = File::open(file_path)
- .expect("couldn't open file");
- let mut buf = Vec::new();
- file.read_to_end(&mut buf).expect("couldn't read key");
-
- let len = buf.len();
- if buf[len - 1] == b'\n' {
- buf.truncate(len - 1)
- }
-
- let key = str::from_utf8(&buf).expect("not utf-8").to_string();
-
- match k.typ.as_ref() {
- "ed25519" => {
- let val = HEXLOWER.decode(key.replace("\n", "").as_ref())
- .expect("key value not hex");
- Key {
- typ: KeyType::Ed25519,
- value: KeyValue {
- typ: KeyType::Ed25519,
- value: val,
- original: key,
- },
- }
- }
- "rsa" => {
- let val = pem::parse(key.clone())
- .expect("key value not pem");
- Key {
- typ: KeyType::Rsa,
- value: KeyValue {
- typ: KeyType::Rsa,
- value: val.contents,
- original: key,
- },
- }
- }
- x => panic!("unknown key type: {}", x),
- }
- })
- .collect();
- Tuf::from_root_keys(root_keys, config)
- } else {
- Tuf::initialize(&temp_path)
- .expect("failed to initialize");
- fs::copy(vector_path.join("repo").join("1.root.json"),
- temp_path.join("metadata").join("current").join("root.json"))
- .expect("failed to copy root.json");
- Tuf::new(config)
- };
-
- match (tuf, &test_vector.error) {
- (Ok(ref tuf), &None) => {
- // first time pulls remote
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()), Ok(()));
- assert!(temp_path.join("targets").join("targets").join("file.txt").exists());
- // second time pulls local
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()), Ok(()));
- }
-
-        (Ok(ref tuf), &Some(ref err)) if err.as_str() == "TargetHashMismatch" => {
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
- Err(Error::UnavailableTarget));
- }
-
-        (Ok(ref tuf), &Some(ref err)) if err.as_str() == "OversizedTarget" => {
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
- Err(Error::UnavailableTarget));
- }
-
- (Err(Error::ExpiredMetadata(ref role)), &Some(ref err))
- if err.starts_with("ExpiredMetadata::") => {
- assert!(err.to_lowercase()
- .ends_with(role.to_string().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
- (Err(Error::UnmetThreshold(_)), &Some(ref err))
-            if err.as_str() == "IllegalRsaKeySize" => {
- ()
- }
-
- (Err(Error::UnmetThreshold(ref role)), &Some(ref err))
- if err.starts_with("UnmetThreshold::") => {
- assert!(err.to_lowercase()
- .ends_with(role.to_string().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
- (Err(Error::MetadataHashMismatch(ref role)), &Some(ref err))
- if err.starts_with("MetadataHashMismatch::") => {
- assert!(err.to_lowercase()
- .ends_with(role.to_string().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
- (Err(Error::OversizedMetadata(ref role)), &Some(ref err))
- if err.starts_with("OversizedMetadata::") => {
- assert!(err.to_lowercase()
- .ends_with(role.to_string().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
- // we're using a json error because the threshold is checked in the deserializer
- // this may need to change in the future
- (Err(Error::Json(ref msg)), &Some(ref err)) if err.starts_with("IllegalThreshold::") => {
- let role = err.split("::").last().unwrap();
-
- assert!(msg.contains("threshold"),
- format!("Role: {}, err: {}", role, err));
- assert!(err.to_lowercase()
- .contains(role.to_lowercase().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
- (Err(Error::NonUniqueSignatures(ref role)), &Some(ref err)) if err.starts_with("NonUniqueSignatures::") => {
- assert!(err.to_lowercase()
- .ends_with(role.to_string().as_str()),
- format!("Role: {}, err: {}", role, err))
- }
-
-        (Ok(ref tuf), &Some(ref err)) if err.as_str() == "UnavailableTarget" => {
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()),
- Err(Error::UnavailableTarget));
- }
-
- (Ok(ref tuf), &Some(ref err))
-            if err.as_str() == "UnmetThreshold::Delegation" => {
- assert_eq!(tuf.fetch_target("targets/file.txt").map(|_| ()), Err(Error::UnavailableTarget));
- }
-
-        x => panic!("Unexpected result: {:?}", x),
- }
- ensure_empty(&temp_path.join("temp"));
- if !test_vector.is_success {
- ensure_empty(&temp_path.join("targets"))
- }
-}
-
-
-macro_rules! test_cases {
- ($name: expr, $md: ident) => {
- mod $md {
- use $crate::{run_test_vector, TestType};
-
- #[test]
- fn file_pinned() {
- run_test_vector($name, TestType::File, true)
- }
-
- #[test]
- fn file_unpinned() {
- run_test_vector($name, TestType::File, false)
- }
-
-            // TODO: no idea how Windows shell scripting works
- #[cfg(not(windows))]
- #[test]
- fn http_pinned() {
- run_test_vector($name, TestType::Http, true)
- }
-
-            // TODO: no idea how Windows shell scripting works
- #[cfg(not(windows))]
- #[test]
- fn http_unpinned() {
- run_test_vector($name, TestType::Http, false)
- }
- }
- }
-}
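Each `test_cases!` invocation below stamps out one module containing four tests, one per combination of transport (File/Http) and root-key pinning. As a sketch (not the literal macro output), `test_cases!("001", _001)` expands to roughly:

    mod _001 {
        // `$crate` resolves to this test crate's root, so these are the
        // items defined above.
        use super::{run_test_vector, TestType};

        #[test]
        fn file_pinned() {
            run_test_vector("001", TestType::File, true)
        }

        #[test]
        fn file_unpinned() {
            run_test_vector("001", TestType::File, false)
        }

        // ...plus `http_pinned` and `http_unpinned`, which pass
        // TestType::Http and are gated behind #[cfg(not(windows))].
    }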
-
-test_cases!("001", _001);
-test_cases!("002", _002);
-test_cases!("003", _003);
-test_cases!("004", _004);
-test_cases!("005", _005);
-test_cases!("006", _006);
-test_cases!("007", _007);
-test_cases!("008", _008);
-test_cases!("009", _009);
-test_cases!("010", _010);
-test_cases!("011", _011);
-test_cases!("012", _012);
-test_cases!("013", _013);
-test_cases!("014", _014);
-test_cases!("015", _015);
-test_cases!("016", _016);
-test_cases!("017", _017);
-test_cases!("018", _018);
-test_cases!("019", _019);
-test_cases!("020", _020);
-test_cases!("021", _021);
-test_cases!("022", _022);
-test_cases!("023", _023);
-test_cases!("024", _024);
-test_cases!("025", _025);
-test_cases!("026", _026);
-test_cases!("027", _027);
-test_cases!("028", _028);
-test_cases!("029", _029);
-test_cases!("030", _030);
-test_cases!("031", _031);
-test_cases!("032", _032);
-test_cases!("033", _033);
-test_cases!("034", _034);
-test_cases!("035", _035);
-test_cases!("036", _036);
-test_cases!("037", _037);
-test_cases!("038", _038);
-test_cases!("039", _039);
-test_cases!("040", _040);
-test_cases!("041", _041);
-test_cases!("042", _042);
-test_cases!("043", _043);
-test_cases!("044", _044);
-test_cases!("045", _045);
-test_cases!("046", _046);
-test_cases!("047", _047);
-test_cases!("048", _048);
-test_cases!("049", _049);
-test_cases!("050", _050);
-test_cases!("051", _051);
-test_cases!("052", _052);
-test_cases!("053", _053);
-test_cases!("054", _054);
-test_cases!("055", _055);