diff --git a/Cargo.lock b/Cargo.lock index 940f8610..9a8632ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -944,6 +944,24 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "path-absolutize" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b288298a7a3a7b42539e3181ba590d32f2d91237b0691ed5f103875c754b3bf5" +dependencies = [ + "path-dedot", +] + +[[package]] +name = "path-dedot" +version = "3.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bfa72956f6be8524f7f7e2b07972dda393cb0008a6df4451f658b7e1bd1af80" +dependencies = [ + "once_cell", +] + [[package]] name = "pem" version = "1.0.0" @@ -1760,7 +1778,7 @@ dependencies = [ [[package]] name = "tough" -version = "0.11.3" +version = "0.12.0" dependencies = [ "chrono", "dyn-clone", @@ -1769,8 +1787,11 @@ dependencies = [ "hex-literal", "httptest", "log", + "maplit", "olpc-cjson", + "path-absolutize", "pem", + "percent-encoding", "reqwest", "ring", "serde", @@ -1785,7 +1806,7 @@ dependencies = [ [[package]] name = "tough-kms" -version = "0.3.3" +version = "0.3.4" dependencies = [ "base64", "bytes", @@ -1804,7 +1825,7 @@ dependencies = [ [[package]] name = "tough-ssm" -version = "0.6.3" +version = "0.6.4" dependencies = [ "rusoto_core", "rusoto_credential", @@ -1856,7 +1877,7 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "tuftool" -version = "0.6.4" +version = "0.7.0" dependencies = [ "assert_cmd", "chrono", diff --git a/tough-kms/CHANGELOG.md b/tough-kms/CHANGELOG.md index 6ff6aee7..7d468fb3 100644 --- a/tough-kms/CHANGELOG.md +++ b/tough-kms/CHANGELOG.md @@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.3.4] - 2021-10-19 +### Changes +- Update dependencies. + ## [0.3.3] - 2021-09-15 ### Changes - Update dependencies. @@ -60,6 +64,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Everything! 
+[0.3.4]: https://github.com/awslabs/tough/compare/tough-kms-v0.3.3...tough-kms-v0.3.4 [0.3.3]: https://github.com/awslabs/tough/compare/tough-kms-v0.3.2...tough-kms-v0.3.3 [0.3.2]: https://github.com/awslabs/tough/compare/tough-kms-v0.3.1...tough-kms-v0.3.2 [0.3.1]: https://github.com/awslabs/tough/compare/tough-kms-v0.3.0...tough-kms-v0.3.1 diff --git a/tough-kms/Cargo.toml b/tough-kms/Cargo.toml index 4774f8ee..bb961e81 100644 --- a/tough-kms/Cargo.toml +++ b/tough-kms/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tough-kms" -version = "0.3.3" +version = "0.3.4" description = "Implements AWS KMS as a key source for TUF signing keys" authors = ["Shailesh Gothi "] license = "MIT OR Apache-2.0" @@ -15,7 +15,7 @@ rusoto-native-tls = ["rusoto_core/native-tls", "rusoto_credential", "rusoto_kms/ rusoto-rustls = ["rusoto_core/rustls", "rusoto_credential", "rusoto_kms/rustls"] [dependencies] -tough = { version = "0.11.3", path = "../tough", features = ["http"] } +tough = { version = "0.12.0", path = "../tough", features = ["http"] } ring = { version = "0.16.16", features = ["std"] } rusoto_core = { version = "0.47", optional = true, default-features = false } rusoto_credential = { version = "0.47", optional = true } diff --git a/tough-ssm/CHANGELOG.md b/tough-ssm/CHANGELOG.md index 9c863fd1..f176be15 100644 --- a/tough-ssm/CHANGELOG.md +++ b/tough-ssm/CHANGELOG.md @@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.6.4] - 2021-10-19 +### Changes +- Update dependencies. + ## [0.6.3] - 2021-09-15 ### Changes - Update dependencies. @@ -69,6 +73,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Everything! +[0.6.4]: https://github.com/awslabs/tough/compare/tough-ssm-v0.6.3...tough-ssm-v0.6.4 [0.6.3]: https://github.com/awslabs/tough/compare/tough-ssm-v0.6.2...tough-ssm-v0.6.3 [0.6.2]: https://github.com/awslabs/tough/compare/tough-ssm-v0.6.1...tough-ssm-v0.6.2 [0.6.1]: https://github.com/awslabs/tough/compare/tough-ssm-v0.6.0...tough-ssm-v0.6.1 diff --git a/tough-ssm/Cargo.toml b/tough-ssm/Cargo.toml index 8fb80621..5a0c0238 100644 --- a/tough-ssm/Cargo.toml +++ b/tough-ssm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tough-ssm" -version = "0.6.3" +version = "0.6.4" description = "Implements AWS SSM as a key source for TUF signing keys" authors = ["Zac Mrowicki "] license = "MIT OR Apache-2.0" @@ -15,7 +15,7 @@ rusoto-native-tls = ["rusoto_core/native-tls", "rusoto_credential", "rusoto_ssm/ rusoto-rustls = ["rusoto_core/rustls", "rusoto_credential", "rusoto_ssm/rustls"] [dependencies] -tough = { version = "0.11.3", path = "../tough", features = ["http"] } +tough = { version = "0.12.0", path = "../tough", features = ["http"] } rusoto_core = { version = "0.47", optional = true, default-features = false } rusoto_credential = { version = "0.47", optional = true } rusoto_ssm = { version = "0.47", optional = true, default-features = false } diff --git a/tough/CHANGELOG.md b/tough/CHANGELOG.md index a0e3e0e3..883f0d55 100644 --- a/tough/CHANGELOG.md +++ b/tough/CHANGELOG.md @@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.12.0] - 2021-10-19 +### Breaking Changes +- Target names are now specified with a struct, `TargetName`, instead of `String`. + +### Changes +- Update dependencies. +- Fix an issue where delegated role names with path traversal constructs could cause files to be written in unexpected locations. +- Fix a similar issue with path traversal constructs in target names. + ## [0.11.3] - 2021-09-15 ### Changes - Update dependencies. @@ -149,7 +158,8 @@ For changes that require modification of calling code see #120 and #121. ### Added - Everything! -[Unreleased]: https://github.com/awslabs/tough/compare/tough-v0.11.3...HEAD +[Unreleased]: https://github.com/awslabs/tough/compare/tough-v0.12.0...HEAD +[0.12.0]: https://github.com/awslabs/tough/compare/tough-v0.11.3...tough-v0.12.0 [0.11.3]: https://github.com/awslabs/tough/compare/tough-v0.11.2...tough-v0.11.3 [0.11.2]: https://github.com/awslabs/tough/compare/tough-v0.11.1...tough-v0.11.2 [0.11.1]: https://github.com/awslabs/tough/compare/tough-v0.11.0...tough-v0.11.1 diff --git a/tough/Cargo.toml b/tough/Cargo.toml index 39cbb408..c2d8caed 100644 --- a/tough/Cargo.toml +++ b/tough/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tough" -version = "0.11.3" +version = "0.12.0" description = "The Update Framework (TUF) repository client" authors = ["iliana destroyer of worlds "] license = "MIT OR Apache-2.0" @@ -15,7 +15,9 @@ globset = { version = "0.4.8" } hex = "0.4.2" log = "0.4.8" olpc-cjson = { version = "0.1.0", path = "../olpc-cjson" } +path-absolutize = "3" pem = "1.0.0" +percent-encoding = "2" reqwest = { version = "0.11.1", optional = true, default-features = false, features = ["blocking"] } ring = { version = "0.16.16", features = ["std"] } serde = { version = "1.0.125", features = ["derive"] } @@ -30,6 +32,7 @@ walkdir = "2.3.2" [dev-dependencies] hex-literal = "0.3.3" httptest = "0.15" +maplit = "1.0.1" [features] http = ["reqwest"] diff --git a/tough/src/cache.rs b/tough/src/cache.rs index 8fbe4b97..85e7272a 100644 --- a/tough/src/cache.rs +++ b/tough/src/cache.rs @@ -1,9 +1,8 @@ use crate::error::{self, Result}; use crate::fetch::{fetch_max_size, fetch_sha256}; use crate::schema::{RoleType, Target}; -use crate::Repository; +use crate::{encode_filename, Prefix, Repository, TargetName}; use snafu::{OptionExt, ResultExt}; -use std::fs::OpenOptions; use std::io::{Read, Write}; use std::path::Path; @@ -38,8 +37,9 @@ impl Repository { // Fetch targets and save them to the outdir if let Some(target_list) = targets_subset { - for target_name in target_list.iter() { - self.cache_target(&targets_outdir, target_name.as_ref())?; + for raw_name in target_list.iter() { + let target_name = TargetName::new(raw_name.as_ref())?; + self.cache_target(&targets_outdir, &target_name)?; } } else { let targets = &self.targets.signed.targets_map(); @@ -162,10 +162,10 @@ impl Repository { .meta .get(&format!("{}.json", name))? .version, - name + encode_filename(name) )) } else { - Some(format!("{}.json", name)) + Some(format!("{}.json", encode_filename(name))) } } @@ -203,24 +203,16 @@ impl Repository { /// Saves a signed target to the specified `outdir`. Retains the digest-prepended filename if /// consistent snapshots are used. 
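A minimal sketch of the `TargetName`-based calling convention introduced above, assuming `repo` is an already-loaded `tough::Repository` and `data1.txt` is a target it knows about (an illustration, not part of the patch):

    use tough::{Repository, TargetName};

    // TargetName::new validates the name, resolving path-like segments
    // (e.g. "foo/../bar" becomes "bar") and rejecting unsafe names such as "..".
    fn read_one(repo: &Repository) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        let name = TargetName::new("data1.txt")?;
        let mut buf = Vec::new();
        if let Some(mut reader) = repo.read_target(&name)? {
            std::io::Read::read_to_end(&mut reader, &mut buf)?;
        }
        Ok(buf)
    }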
- fn cache_target<P: AsRef<Path>>(&self, outdir: P, name: &str) -> Result<()> { - let t = self - .targets - .signed - .find_target(name) - .context(error::CacheTargetMissing { - target_name: name.to_owned(), - })?; - let (sha, filename) = self.target_digest_and_filename(t, name); - let mut reader = self.fetch_target(t, &sha, filename.as_str())?; - let path = outdir.as_ref().join(filename); - let mut f = OpenOptions::new() - .write(true) - .create(true) - .open(&path) - .context(error::CacheTargetWrite { path: path.clone() })?; - let _ = std::io::copy(&mut reader, &mut f).context(error::CacheTargetWrite { path })?; - Ok(()) + fn cache_target<P: AsRef<Path>>(&self, outdir: P, name: &TargetName) -> Result<()> { + self.save_target( + name, + outdir, + if self.consistent_snapshot { + Prefix::Digest + } else { + Prefix::None + }, + ) } /// Gets the max size of the snapshot.json file as specified by the timestamp file. @@ -242,13 +234,16 @@ pub(crate) fn target_digest_and_filename( &self, target: &Target, - name: &str, + name: &TargetName, ) -> (Vec<u8>, String) { let sha256 = &target.hashes.sha256.clone().into_vec(); if self.consistent_snapshot { - (sha256.clone(), format!("{}.{}", hex::encode(sha256), name)) + ( + sha256.clone(), + format!("{}.{}", hex::encode(sha256), name.resolved()), + ) } else { - (sha256.clone(), name.to_owned()) + (sha256.clone(), name.resolved().to_owned()) } } diff --git a/tough/src/editor/mod.rs b/tough/src/editor/mod.rs index da024a4a..84d1f6ae 100644 --- a/tough/src/editor/mod.rs +++ b/tough/src/editor/mod.rs @@ -21,8 +21,8 @@ use crate::schema::{ Targets, Timestamp, TimestampMeta, }; use crate::transport::Transport; -use crate::Limits; -use crate::Repository; +use crate::{encode_filename, Limits}; +use crate::{Repository, TargetName}; use chrono::{DateTime, Utc}; use ring::digest::{SHA256, SHA256_OUTPUT_LEN}; use ring::rand::SystemRandom; @@ -30,6 +30,8 @@ use serde_json::Value; use snafu::{ensure, OptionExt, ResultExt}; use std::borrow::Cow; use std::collections::HashMap; +use std::convert::TryInto; +use std::fmt::Display; use std::num::NonZeroU64; use std::path::Path; use url::Url; @@ -193,6 +195,14 @@ impl RepositoryEditor { .build_timestamp(&signed_snapshot) .and_then(|timestamp| SignedRole::new(timestamp, &root, keys, &rng))?; + // This validation can only be done from the top level targets.json role. This check verifies + // that each target's delegate hierarchy is a match (i.e. its delegate ownership is valid).
+ signed_targets + .signed + .signed + .validate() + .context(error::InvalidPath)?; + Ok(SignedRepository { root: self.signed_root, targets: signed_targets, @@ -256,13 +266,17 @@ impl RepositoryEditor { } /// Add a `Target` to the repository - pub fn add_target(&mut self, name: &str, target: Target) -> Result<&mut Self> { - self.targets_editor_mut()?.add_target(name, target); + pub fn add_target<T, E>(&mut self, name: T, target: Target) -> Result<&mut Self> + where + T: TryInto<TargetName, Error = E>, + E: Display, + { + self.targets_editor_mut()?.add_target(name, target)?; Ok(self) } /// Remove a `Target` from the repository - pub fn remove_target(&mut self, name: &str) -> Result<&mut Self> { + pub fn remove_target(&mut self, name: &TargetName) -> Result<&mut Self> { self.targets_editor_mut()?.remove_target(name); Ok(self) @@ -279,7 +293,7 @@ impl RepositoryEditor { P: AsRef<Path>, { let (target_name, target) = RepositoryEditor::build_target(target_path)?; - self.add_target(&target_name, target)?; + self.add_target(target_name, target)?; Ok(self) } @@ -292,31 +306,32 @@ impl RepositoryEditor { { for target in targets { let (target_name, target) = RepositoryEditor::build_target(target)?; - self.add_target(&target_name, target)?; + self.add_target(target_name, target)?; } Ok(self) } /// Builds a target struct for the given path - pub fn build_target<P>(target_path: P) -> Result<(String, Target)> + pub fn build_target<P>
(target_path: P) -> Result<(TargetName, Target)> where P: AsRef<Path>, { let target_path = target_path.as_ref(); + // Get the file name as a string + let target_name = TargetName::new( + target_path + .file_name() + .context(error::NoFileName { path: target_path })? + .to_str() + .context(error::PathUtf8 { path: target_path })?, + )?; + // Build a Target from the path given. If it is not a file, this will fail let target = Target::from_path(target_path).context(error::TargetFromPath { path: target_path })?; - // Get the file name as a string - let target_name = target_path - .file_name() - .context(error::NoFileName { path: target_path })? - .to_str() - .context(error::PathUtf8 { path: target_path })? - .to_owned(); - Ok((target_name, target)) } @@ -487,11 +502,15 @@ impl RepositoryEditor { .signed; let metadata_base_url = parse_url(metadata_url)?; // path to updated metadata + let encoded_name = encode_filename(name); + let encoded_filename = format!("{}.json", encoded_name); let role_url = metadata_base_url - .join(&format!("{}.json", name)) - .context(error::JoinUrl { - path: name.to_string(), + .join(&encoded_filename) + .with_context(|| error::JoinUrlEncoded { + original: name, + encoded: encoded_name, + filename: encoded_filename, url: metadata_base_url.clone(), })?; let reader = Box::new(fetch_max_size( @@ -548,13 +567,16 @@ impl RepositoryEditor { // load the new roles for name in new_roles { // path to new metadata - let role_url = - metadata_base_url - .join(&format!("{}.json", name)) - .context(error::JoinUrl { - path: name.to_string(), - url: metadata_base_url.clone(), - })?; + let encoded_name = encode_filename(&name); + let encoded_filename = format!("{}.json", encoded_name); + let role_url = metadata_base_url.join(&encoded_filename).with_context(|| { + error::JoinUrlEncoded { + original: &name, + encoded: encoded_name, + filename: encoded_filename, + url: metadata_base_url.clone(), + } + })?; let reader = Box::new(fetch_max_size( transport.as_ref(), role_url, diff --git a/tough/src/editor/signed.rs b/tough/src/editor/signed.rs index edbd2091..abd04cbb 100644 --- a/tough/src/editor/signed.rs +++ b/tough/src/editor/signed.rs @@ -27,6 +27,8 @@ use std::os::unix::fs::symlink; #[cfg(target_os = "windows")] use std::os::windows::fs::symlink_file as symlink; +use crate::TargetName; +use std::borrow::Cow; use std::path::{Path, PathBuf}; use url::Url; use walkdir::WalkDir; @@ -310,7 +312,7 @@ impl SignedRepository { input_path: &Path, outdir: &Path, replace_behavior: PathExists, - target_filename: Option<&str>, + target_filename: Option<&TargetName>, ) -> Result<()> { ensure!( input_path.is_file(), @@ -352,7 +354,7 @@ impl SignedRepository { input_path: &Path, outdir: &Path, replace_behavior: PathExists, - target_filename: Option<&str>, + target_filename: Option<&TargetName>, ) -> Result<()> { ensure!( input_path.is_file(), @@ -385,7 +387,7 @@ impl SignedRepository { } impl TargetsWalker for SignedRepository { - fn targets(&self) -> HashMap<String, &Target> { + fn targets(&self) -> HashMap<TargetName, &Target> { // Since there is access to `targets.json` metadata, all targets // can be found using `targets_map()` self.targets.signed.signed.targets_map() @@ -484,7 +486,7 @@ impl SignedDelegatedTargets { input_path: &Path, outdir: &Path, replace_behavior: PathExists, - target_filename: Option<&str>, + target_filename: Option<&TargetName>, ) -> Result<()> { ensure!( input_path.is_file(), @@ -526,7 +528,7 @@ impl SignedDelegatedTargets { input_path: &Path, outdir: &Path, replace_behavior: PathExists, - target_filename:
Option<&str>, + target_filename: Option<&TargetName>, ) -> Result<()> { ensure!( input_path.is_file(), @@ -559,7 +561,7 @@ impl SignedDelegatedTargets { } impl TargetsWalker for SignedDelegatedTargets { - fn targets(&self) -> HashMap { + fn targets(&self) -> HashMap { // There are multiple `Targets` roles here that may or may not be related, // so find all of the `Target`s related to each role and combine them. let mut targets_map = HashMap::new(); @@ -580,7 +582,7 @@ impl TargetsWalker for SignedDelegatedTargets { /// also determine if a file prefix needs to be used. trait TargetsWalker { /// Returns a map of all targets this manager is responsible for - fn targets(&self) -> HashMap; + fn targets(&self) -> HashMap; /// Determines whether or not consistent snapshot filenames should be used fn consistent_snapshot(&self) -> bool; @@ -595,7 +597,7 @@ trait TargetsWalker { replace_behavior: PathExists, ) -> Result<()> where - F: Fn(&Self, &Path, &Path, PathExists, Option<&str>) -> Result<()>, + F: Fn(&Self, &Path, &Path, PathExists, Option<&TargetName>) -> Result<()>, { std::fs::create_dir_all(outdir).context(error::DirCreate { path: outdir })?; @@ -636,20 +638,22 @@ trait TargetsWalker { &self, input: &Path, outdir: &Path, - target_filename: Option<&str>, + target_filename: Option<&TargetName>, ) -> Result { let outdir = std::fs::canonicalize(outdir).context(error::AbsolutePath { path: outdir })?; // If the caller requested a specific target filename, use that, otherwise use the filename // component of the input path. - let file_name = if let Some(target_filename) = target_filename { - target_filename + let target_name = if let Some(target_filename) = target_filename { + Cow::Borrowed(target_filename) } else { - input - .file_name() - .context(error::NoFileName { path: input })? - .to_str() - .context(error::PathUtf8 { path: input })? + Cow::Owned(TargetName::new( + input + .file_name() + .context(error::NoFileName { path: input })? + .to_str() + .context(error::PathUtf8 { path: input })?, + )?) }; // create a Target object using the input path. @@ -660,7 +664,7 @@ trait TargetsWalker { // with that name. If so... let repo_targets = &self.targets(); let repo_target = repo_targets - .get(file_name) + .get(&target_name) .context(error::PathIsNotTarget { path: input })?; // compare the hashes of the target from the repo and the target we just created. 
They // should match, or we alert the caller; if target replacement is intended, it should @@ -678,10 +682,10 @@ outdir.join(format!( "{}.{}", hex::encode(&target_from_path.hashes.sha256), - file_name + target_name.resolved() )) } else { - outdir.join(&file_name) + outdir.join(target_name.resolved()) }; // Return the target path, using the `TargetPath` enum that represents the type of file diff --git a/tough/src/editor/targets.rs b/tough/src/editor/targets.rs index a0901ec9..c8381e2d 100644 --- a/tough/src/editor/targets.rs +++ b/tough/src/editor/targets.rs @@ -15,14 +15,16 @@ use crate::schema::{ Targets, }; use crate::transport::Transport; -use crate::Limits; -use crate::Repository; +use crate::{encode_filename, Limits}; +use crate::{Repository, TargetName}; use chrono::{DateTime, Utc}; use ring::rand::SystemRandom; use serde_json::Value; use snafu::{OptionExt, ResultExt}; use std::borrow::Cow; use std::collections::HashMap; +use std::convert::TryInto; +use std::fmt::Display; use std::num::NonZeroU64; use std::path::Path; use url::Url; @@ -65,9 +67,9 @@ pub struct TargetsEditor { /// for "targets" on a repository that doesn't use delegated targets delegations: Option<Delegations>, /// New targets that were added to `name` - new_targets: Option<HashMap<String, Target>>, + new_targets: Option<HashMap<TargetName, Target>>, /// Targets that were previously in `name` - existing_targets: Option<HashMap<String, Target>>, + existing_targets: Option<HashMap<TargetName, Target>>, /// Version of the `Targets` version: Option<NonZeroU64>, /// Expiration of the `Targets` @@ -175,11 +177,21 @@ impl TargetsEditor { } /// Add a `Target` to the `Targets` role - pub fn add_target(&mut self, name: &str, target: Target) -> &mut Self { + pub fn add_target<T, E>(&mut self, name: T, target: Target) -> Result<&mut Self> + where + T: TryInto<TargetName, Error = E>, + E: Display, + { + let target_name = name.try_into().map_err(|e| { + error::InvalidTargetName { + inner: e.to_string(), + } + .build() + })?; self.new_targets .get_or_insert_with(HashMap::new) - .insert(name.to_string(), target); - self + .insert(target_name, target); + Ok(self) } /// Add a target to the repository using its path @@ -194,19 +206,20 @@ { let target_path = target_path.as_ref(); + // Get the file name as a string + let target_name = TargetName::new( + target_path + .file_name() + .context(error::NoFileName { path: target_path })? + .to_str() + .context(error::PathUtf8 { path: target_path })?, + )?; + // Build a Target from the path given. If it is not a file, this will fail let target = Target::from_path(target_path).context(error::TargetFromPath { path: target_path })?; - // Get the file name as a string - let target_name = target_path - .file_name() - .context(error::NoFileName { path: target_path })? - .to_str() - .context(error::PathUtf8 { path: target_path })?
- .to_owned(); - - self.add_target(&target_name, target); + self.add_target(target_name, target)?; Ok(self) } @@ -224,7 +237,7 @@ impl TargetsEditor { } /// Remove a `Target` from the targets if it exists - pub fn remove_target(&mut self, name: &str) -> &mut Self { + pub fn remove_target(&mut self, name: &TargetName) -> &mut Self { if let Some(targets) = self.existing_targets.as_mut() { targets.remove(name); } @@ -377,11 +390,15 @@ impl TargetsEditor { let metadata_base_url = parse_url(metadata_url)?; // path to updated metadata + let encoded_name = encode_filename(name); + let encoded_filename = format!("{}.json", encoded_name); let role_url = metadata_base_url - .join(&format!("{}.json", name)) - .context(error::JoinUrl { - path: name.to_string(), + .join(&encoded_filename) + .with_context(|| error::JoinUrlEncoded { + original: name, + encoded: encoded_name, + filename: encoded_filename, url: metadata_base_url, })?; let reader = Box::new(fetch_max_size( @@ -430,7 +447,7 @@ impl TargetsEditor { // the most common use case, it's possible this is what a user wants. // If it's important to have a non-empty targets, the object can be // inspected by the calling code. - let mut targets: HashMap = HashMap::new(); + let mut targets: HashMap = HashMap::new(); if let Some(ref existing_targets) = self.existing_targets { targets.extend(existing_targets.clone()); } diff --git a/tough/src/editor/test.rs b/tough/src/editor/test.rs index b8268d5e..b41f017f 100644 --- a/tough/src/editor/test.rs +++ b/tough/src/editor/test.rs @@ -6,6 +6,7 @@ mod tests { use crate::editor::RepositoryEditor; use crate::key_source::LocalKeySource; use crate::schema::{Signed, Snapshot, Target, Targets, Timestamp}; + use crate::TargetName; use chrono::{Duration, Utc}; use std::num::NonZeroU64; use std::path::PathBuf; @@ -73,7 +74,7 @@ mod tests { editor .targets(targets) .unwrap() - .add_target("file4.txt", target4) + .add_target(TargetName::new("file4.txt").unwrap(), target4) .unwrap() .add_target_path(target3_path) .unwrap(); diff --git a/tough/src/error.rs b/tough/src/error.rs index 604d0329..5b1f6351 100644 --- a/tough/src/error.rs +++ b/tough/src/error.rs @@ -6,7 +6,7 @@ #![allow(clippy::default_trait_access)] use crate::schema::RoleType; -use crate::{schema, TransportError}; +use crate::{schema, TargetName, TransportError}; use chrono::{DateTime, Utc}; use snafu::{Backtrace, Snafu}; use std::io; @@ -141,6 +141,9 @@ pub enum Error { #[snafu(display("Source path for target must be file or symlink - '{}'", path.display()))] InvalidFileType { path: PathBuf, backtrace: Backtrace }, + #[snafu(display("Encountered an invalid target name: {}", inner))] + InvalidTargetName { inner: String, backtrace: Backtrace }, + /// The library failed to create a URL from a base URL and a path. 
#[snafu(display("Failed to join \"{}\" to URL \"{}\": {}", path, url, source))] JoinUrl { @@ -150,6 +153,23 @@ pub enum Error { backtrace: Backtrace, }, + #[snafu(display( + "After encoding the name '{}' to '{}', failed to join '{}' to URL '{}': {}", + original, + encoded, + filename, + url, + source + ))] + JoinUrlEncoded { + original: String, + encoded: String, + filename: String, + url: url::Url, + source: url::ParseError, + backtrace: Backtrace, + }, + #[snafu(display("Unable to parse keypair: {}", source))] KeyPairFromKeySource { source: Box, @@ -201,6 +221,20 @@ pub enum Error { #[snafu(display("Missing '{}' when building repo from RepositoryEditor", field))] Missing { field: String, backtrace: Backtrace }, + #[snafu(display("Unable to create NamedTempFile in directory '{}': {}", path.display(), source))] + NamedTempFileCreate { + path: PathBuf, + source: std::io::Error, + backtrace: Backtrace, + }, + + #[snafu(display("Unable to persist NamedTempFile to '{}': {}", path.display(), source))] + NamedTempFilePersist { + path: PathBuf, + source: tempfile::PersistError, + backtrace: Backtrace, + }, + /// Unable to determine file name (path ends in '..' or is '/') #[snafu(display("Unable to determine file name from path: '{}'", path.display()))] NoFileName { path: PathBuf, backtrace: Backtrace }, @@ -276,6 +310,52 @@ pub enum Error { backtrace: Backtrace, }, + #[snafu(display("Unable to get info about the outdir '{}': {}", path.display(), source))] + SaveTargetDirInfo { + path: PathBuf, + source: std::io::Error, + backtrace: Backtrace, + }, + + #[snafu(display("The outdir '{}' either does not exist or is not a directory", path.display()))] + SaveTargetOutdir { path: PathBuf, backtrace: Backtrace }, + + #[snafu(display("Unable to canonicalize the outdir '{}': {}", path.display(), source))] + SaveTargetOutdirCanonicalize { + path: PathBuf, + source: std::io::Error, + backtrace: Backtrace, + }, + + #[snafu(display( + "The path '{}' to which we would save target '{}' has no parent", + path.display(), + name.raw(), + ))] + SaveTargetNoParent { + path: PathBuf, + name: TargetName, + backtrace: Backtrace, + }, + + #[snafu(display("The target '{}' was not found", name.raw()))] + SaveTargetNotFound { + name: TargetName, + backtrace: Backtrace, + }, + + #[snafu(display( + "The target '{}' had an unsafe name. Not writing to '{}' because it is not in the outdir '{}'", + name.raw(), + filepath.display(), + outdir.display() + ))] + SaveTargetUnsafePath { + name: TargetName, + outdir: PathBuf, + filepath: PathBuf, + }, + #[snafu(display("Failed to serialize role '{}' for signing: {}", role, source))] SerializeRole { role: String, @@ -342,6 +422,21 @@ pub enum Error { backtrace: Backtrace, }, + #[snafu(display("Unable to resolve the target name '{}': {}", name, source))] + TargetNameResolve { + name: String, + source: std::io::Error, + }, + + #[snafu(display( + "Unable to resolve target name '{}', a path with no components was produced", + name + ))] + TargetNameComponentsEmpty { name: String }, + + #[snafu(display("Unable to resolve target name '{}', expected a rooted path", name))] + TargetNameRootMissing { name: String }, + /// A transport error occurred while fetching a URL. #[snafu(display("Failed to fetch {}: {}", url, source))] Transport { @@ -350,6 +445,24 @@ pub enum Error { backtrace: Backtrace, }, + #[snafu(display( + "The target name '..' is unsafe. 
Interpreting it as a path could escape from the intended \ + directory", + ))] + UnsafeTargetNameDotDot {}, + + #[snafu(display( + "The target name '{}' is unsafe. Interpreting it as a path would lead to an empty filename", + name + ))] + UnsafeTargetNameEmpty { name: String }, + + #[snafu(display( + "The target name '{}' is unsafe. Interpreting it as a path would lead to a filename of '/'", + name + ))] + UnsafeTargetNameSlash { name: String }, + /// A metadata file could not be verified. #[snafu(display("Failed to verify {} metadata: {}", role, source))] VerifyMetadata { @@ -414,9 +527,9 @@ pub enum Error { backtrace: Backtrace, }, - #[snafu(display("The target '{}' was not found", target_name))] + #[snafu(display("The target '{}' was not found", target_name.raw()))] CacheTargetMissing { - target_name: String, + target_name: TargetName, source: crate::schema::Error, backtrace: Backtrace, }, @@ -428,9 +541,6 @@ pub enum Error { backtrace: Backtrace, }, - #[snafu(display("Target file not delegated: {}", target_url))] - TargetNotFound { target_url: String }, - #[snafu(display("Delegated role not found: {}", name))] DelegateNotFound { name: String }, diff --git a/tough/src/lib.rs b/tough/src/lib.rs index e6ab74fe..79208b80 100644 --- a/tough/src/lib.rs +++ b/tough/src/lib.rs @@ -40,6 +40,7 @@ mod io; pub mod key_source; pub mod schema; pub mod sign; +mod target_name; mod transport; use crate::datastore::Datastore; @@ -48,16 +49,22 @@ use crate::fetch::{fetch_max_size, fetch_sha256}; /// An HTTP transport that includes retries. #[cfg(feature = "http")] pub use crate::http::{HttpTransport, HttpTransportBuilder, RetryRead}; -use crate::schema::{DelegatedRole, Delegations}; -use crate::schema::{Role, RoleType, Root, Signed, Snapshot, Timestamp}; +use crate::schema::{ + DelegatedRole, Delegations, Role, RoleType, Root, Signed, Snapshot, Timestamp, +}; +pub use crate::target_name::TargetName; pub use crate::transport::{ DefaultTransport, FilesystemTransport, Transport, TransportError, TransportErrorKind, }; use chrono::{DateTime, Utc}; +use log::warn; +use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC}; use snafu::{ensure, OptionExt, ResultExt}; use std::collections::HashMap; +use std::fs::create_dir_all; use std::io::Read; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; +use tempfile::NamedTempFile; use url::Url; /// Represents whether a Repository should fail to load when metadata is expired (`Safe`) or whether @@ -268,6 +275,16 @@ impl Default for Limits { } } +/// Use this enum to specify whether or not we should include a prefix in the target name when +/// saving a target. +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub enum Prefix { + /// Do not prepend the target name when saving the target file, e.g. `my-target.txt`. + None, + /// Prepend the sha digest when saving the target file, e.g. `0123456789abcdef.my-target.txt`. + Digest, +} + /// A TUF repository. /// /// You can create a `Repository` using a [`RepositoryLoader`]. @@ -404,7 +421,7 @@ impl Repository { /// before its checksum is validated. If the maximum size is reached or there is a checksum /// mismatch, the reader returns a [`std::io::Error`]. **Consumers of this library must not use /// data from the reader if it returns an error.** - pub fn read_target(&self, name: &str) -> Result> { + pub fn read_target(&self, name: &TargetName) -> Result> { // Check for repository metadata expiration. 
if self.expiration_enforcement == ExpirationEnforcement::Safe { ensure!( @@ -440,12 +457,127 @@ impl Repository { }) } + /// Fetches a target from the repository and saves it to `outdir`. Attempts to do this as safely + /// as possible by using `path-absolutize` to eliminate `../` path traversals from the target's + /// name. Ensures that the resulting filepath is in `outdir` or a child of `outdir`. + /// + /// # Parameters + /// + /// - `name`: the target name. + /// - `outdir`: the directory to save the target in. + /// - `prepend`: Whether or not to prepend the sha digest when saving the target file. + /// + /// # Preconditions and Behavior + /// + /// - `outdir` must exist. For safety we want to canonicalize the path before we join to it. + /// - intermediate directories will be created in `outdir` with `create_dir_all` + /// - Will error if the result of path resolution results in a filepath outside of `outdir` or + /// outside of a delegated target's correct path of delegation. + /// + pub fn save_target<P>(&self, name: &TargetName, outdir: P, prepend: Prefix) -> Result<()> + where + P: AsRef<Path>, + { + // Ensure the outdir exists then canonicalize the path. + let outdir = outdir.as_ref(); + let outdir = outdir + .canonicalize() + .context(error::SaveTargetOutdirCanonicalize { path: outdir })?; + ensure!(outdir.is_dir(), error::SaveTargetOutdir { path: outdir }); + + if name.resolved() != name.raw() { + // Since target names with resolvable path segments are unusual and potentially unsafe, + // we warn the user that we have encountered them. + warn!( + "The target named '{}' had path segments that were resolved to produce the \ + following name: {}", + name.raw(), + name.resolved() + ); + } + + let filename = match prepend { + Prefix::Digest => { + let target = self.targets.signed.find_target(name).with_context(|| { + error::CacheTargetMissing { + target_name: name.clone(), + } + })?; + let sha256 = target.hashes.sha256.clone().into_vec(); + format!("{}.{}", hex::encode(sha256), name.resolved()) + } + Prefix::None => name.resolved().to_owned(), + }; + + let resolved_filepath = outdir.join(filename); + + // Find out what directory we will be writing the target file to. + let filepath_dir = + resolved_filepath + .parent() + .with_context(|| error::SaveTargetNoParent { + path: &resolved_filepath, + name: name.clone(), + })?; + + // Make sure the filepath we are writing to is in or below outdir. + ensure!( + filepath_dir.starts_with(&outdir), + error::SaveTargetUnsafePath { + name: name.clone(), + outdir, + filepath: &resolved_filepath, + } + ); + + // Fetch and write the target using NamedTempFile for an atomic file creation. + let mut reader = self + .read_target(name)? + .with_context(|| error::SaveTargetNotFound { name: name.clone() })?; + create_dir_all(&filepath_dir).context(error::DirCreate { + path: &filepath_dir, + })?; + let mut f = NamedTempFile::new_in(&filepath_dir).context(error::NamedTempFileCreate { + path: &filepath_dir, + })?; + std::io::copy(&mut reader, &mut f).context(error::FileWrite { path: &f.path() })?; + f.persist(&resolved_filepath) + .context(error::NamedTempFilePersist { + path: resolved_filepath, + })?; + + Ok(()) + } + /// Return the named `DelegatedRole` if found. pub fn delegated_role(&self, name: &str) -> Option<&DelegatedRole> { self.targets.signed.delegated_role(name).ok() } } +/// The set of characters that will be escaped when converting a delegated role name into a +/// filename. This needs to at least include path traversal characters to prevent tough from writing +/// outside of its datastore. +/// +/// In order to match the Python TUF implementation, we mimic the Python function +/// [urllib.parse.quote] (given a 'safe' parameter value of `""`) which follows RFC 3986 and states +/// +/// > Replace special characters in string using the %xx escape. Letters, digits, and the characters +/// `_.-~` are never quoted. +/// +/// [urllib.parse.quote]: https://docs.python.org/3/library/urllib.parse.html#url-quoting +const CHARACTERS_TO_ESCAPE: AsciiSet = NON_ALPHANUMERIC + .remove(b'_') + .remove(b'.') + .remove(b'-') + .remove(b'~'); + +/// Percent encode a potential filename to ensure it is safe and does not have path traversal +/// characters.
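As a worked example of the escape set above, a hypothetical check using the values exercised by the `encode_filename_*` unit tests further below:

    #[test]
    fn hypothetical_encoded_metadata_filename() {
        // "../a" percent-encodes to "..%2Fa", so a delegated role named "../a"
        // yields the metadata filename "..%2Fa.json" and cannot traverse out of
        // the metadata directory.
        assert_eq!(format!("{}.json", encode_filename("../a")), "..%2Fa.json");
    }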
+pub(crate) fn encode_filename<S: AsRef<str>>(name: S) -> String { + utf8_percent_encode(name.as_ref(), &CHARACTERS_TO_ESCAPE).to_string() +} + /// Ensures that system time has not stepped backward since it was last sampled fn system_time(datastore: &Datastore) -> Result<DateTime<Utc>> { let file = "latest_known_time.json"; @@ -997,6 +1129,9 @@ fn load_targets( )?; } + // This validation can only be done from the top level targets.json role. This check verifies + // that each target's delegate hierarchy is a match (i.e. its delegate ownership is valid). + targets.signed.validate().context(error::InvalidPath)?; Ok(targets) } @@ -1023,9 +1158,13 @@ fn load_delegations( })?; let path = if consistent_snapshot { - format!("{}.{}.json", &role_meta.version, &delegated_role.name) + format!( + "{}.{}.json", + &role_meta.version, + encode_filename(&delegated_role.name) + ) } else { - format!("{}.json", &delegated_role.name) + format!("{}.json", encode_filename(&delegated_role.name)) }; let role_url = metadata_base_url.join(&path).context(error::JoinUrl { path: path.clone(), @@ -1058,11 +1197,6 @@ expected: role_meta.version } ); - { - if let Some(delegations) = role.signed.delegations.as_ref() { - delegations.verify_paths().context(error::InvalidPath {})?; - } - } datastore.create(&path, &role)?; delegated_roles.insert(delegated_role.name.clone(), Some(role)); @@ -1124,4 +1258,93 @@ mod tests { let default = ExpirationEnforcement::default(); assert_eq!(default, ExpirationEnforcement::Safe); } + + #[test] + fn encode_filename_1() { + let input = "../a"; + let expected = "..%2Fa"; + let actual = encode_filename(input); + assert_eq!(expected, actual); + } + + #[test] + fn encode_filename_2() { + let input = ""; + let expected = ""; + let actual = encode_filename(input); + assert_eq!(expected, actual); + } + + #[test] + fn encode_filename_3() { + let input = "."; + let expected = "."; + let actual = encode_filename(input); + assert_eq!(expected, actual); + } + + #[test] + fn encode_filename_4() { + let input = "/"; + let expected = "%2F"; + let actual = encode_filename(input); + assert_eq!(expected, actual); + } + + #[test] + fn encode_filename_5() { + let input = "ö"; + let expected = "%C3%B6"; + let actual = encode_filename(input); + assert_eq!(expected, actual); + } + + #[test] + fn encode_filename_6() { + let input = "!@#$%^&*()[]|\~`'";:.,>, + pub targets: HashMap<TargetName, Target>, /// Delegations describes subsets of the targets for which responsibility is delegated to /// another role. @@ -501,13 +505,22 @@ impl Targets { } } - /// Given a target url, returns a reference to the Target struct or error if the target is unreachable - pub fn find_target(&self, target_name: &str) -> Result<&Target> { + /// Given a target name, returns a reference to the Target struct or an error if the target is + /// unreachable. + /// + /// **Caution**: does not imply that delegations in this struct or any child are valid. + /// + pub fn find_target(&self, target_name: &TargetName) -> Result<&Target> { if let Some(target) = self.targets.get(target_name) { return Ok(target); } if let Some(delegations) = &self.delegations { for role in &delegations.roles { + // If the target cannot match this DelegatedRole, then we do not want to recurse and + // check any of its child roles either.
+ if !role.paths.matches_target_name(target_name) { + continue; + } if let Some(targets) = &role.targets { if let Ok(target) = targets.signed.find_target(target_name) { return Ok(target); @@ -515,31 +528,31 @@ } } } - Err(Error::TargetNotFound { - target_file: target_name.to_string(), - }) + error::TargetNotFound { + name: target_name.clone(), + } + .fail() } /// Returns a hashmap of all targets and all delegated targets recursively - pub fn targets_map(&self) -> HashMap<String, &Target> { - let mut targets_map = HashMap::new(); - for target in &self.targets { - targets_map.insert(target.0.clone(), target.1); - } + pub fn targets_map(&self) -> HashMap<TargetName, &Target> { + self.targets_iter() + .map(|(target_name, target)| (target_name.clone(), target)) + .collect() + } + + /// Returns an iterator of all targets and all delegated targets recursively + pub fn targets_iter(&self) -> impl Iterator<Item = (&TargetName, &Target)> + '_ { + let mut iter: Box<dyn Iterator<Item = (&TargetName, &Target)>> = + Box::new(self.targets.iter()); if let Some(delegations) = &self.delegations { for role in &delegations.roles { if let Some(targets) = &role.targets { - targets_map.extend(targets.signed.targets_map()); + iter = Box::new(iter.chain(targets.signed.targets_iter())); } } } - - targets_map - } - - /// Returns an iterator of all targets delegated - pub fn targets_iter(&self) -> impl Iterator<Item = (String, &Target)> + '_ { - self.targets_map().into_iter() + iter } /// Recursively clears all targets @@ -555,12 +568,12 @@ } /// Add a target to targets - pub fn add_target(&mut self, name: &str, target: Target) { - self.targets.insert(name.to_string(), target); + pub fn add_target(&mut self, name: TargetName, target: Target) { + self.targets.insert(name, target); } /// Remove a target from targets - pub fn remove_target(&mut self, name: &str) -> Option<Target> { + pub fn remove_target(&mut self, name: &TargetName) -> Option<Target> { self.targets.remove(name) } @@ -698,6 +711,17 @@ needed_roles } + + /// Calls `find_target` on each target (recursively provided by `targets_iter`). This + /// proves that the target is either owned by us, or correctly matches through some hierarchy of + /// [`PathSets`] below us. When called on the top level [`Targets`] of a repository, this proves + /// that the ownership of each target is valid. + pub(crate) fn validate(&self) -> Result<()> { + for (target_name, _) in self.targets_iter() { + self.find_target(target_name)?; + } + Ok(()) + } } impl Role for Targets { @@ -758,9 +782,9 @@ fn filename(&self, consistent_snapshot: bool) -> String { if consistent_snapshot { - format!("{}.{}.json", self.version(), self.name) + format!("{}.{}.json", self.version(), encode_filename(&self.name)) } else { - format!("{}.json", self.name) + format!("{}.json", encode_filename(&self.name)) } } @@ -866,7 +890,7 @@ pub enum PathSet { /// PATHPATTERN, it is RECOMMENDED that PATHPATTERN uses the forward slash (/) as directory /// separator and does not start with a directory separator, akin to TARGETSPATH. #[serde(rename = "paths")] - Paths(Vec<String>), + Paths(Vec<PathPattern>), /// The "path_hash_prefixes" list is used to succinctly describe a set of target paths. /// Specifically, each HEX_DIGEST in "path_hash_prefixes" describes a set of target paths; /// prefix as one of the prefixes in "path_hash_prefixes". This is useful to split a large /// number of targets into separate bins identified by consistent hashing.
#[serde(rename = "path_hash_prefixes")] - PathHashPrefixes(Vec<String>), + PathHashPrefixes(Vec<PathHashPrefix>), +} + +/// A glob-like path pattern for matching delegated targets, e.g. `foo/bar/*`. +/// +/// `PATHPATTERN` supports the Unix shell pattern matching convention for paths +/// ([glob](https://man7.org/linux/man-pages/man7/glob.7.html)bing pathnames). Its format may either +/// indicate a path to a single file, or to multiple files with the use of shell-style wildcards +/// (`*` or `?`). To avoid surprising behavior when matching targets with `PATHPATTERN` it is +/// RECOMMENDED that `PATHPATTERN` uses the forward slash (`/`) as directory separator and does +/// not start with a directory separator, as is also recommended for `TARGETPATH`. A path +/// separator in a path SHOULD NOT be matched by a wildcard in the `PATHPATTERN`. +/// +/// Some example `PATHPATTERN`s and expected matches: +/// * a `PATHPATTERN` of `"targets/*.tgz"` would match file paths `"targets/foo.tgz"` and +/// `"targets/bar.tgz"`, but not `"targets/foo.txt"`. +/// * a `PATHPATTERN` of `"foo-version-?.tgz"` matches `"foo-version-2.tgz"` and +/// `"foo-version-a.tgz"`, but not `"foo-version-alpha.tgz"`. +/// * a `PATHPATTERN` of `"*.tgz"` would match `"foo.tgz"` and `"bar.tgz"`, +/// but not `"targets/foo.tgz"` +/// * a `PATHPATTERN` of `"foo.tgz"` would match only `"foo.tgz"` +#[derive(Clone, Debug)] +pub struct PathPattern { + value: String, + glob: GlobMatcher, +} + +impl PathPattern { + /// Create a new, valid `PathPattern`. This will fail if we cannot parse the value as a glob. It is important that + /// our implementation stop if it encounters a glob it cannot parse so that we do not load repositories where we + /// cannot enforce delegate ownership. + pub fn new<S: Into<String>>(value: S) -> Result<Self> { + let value = value.into(); + let glob = Glob::new(&value) + .context(error::Glob { pattern: &value })? + .compile_matcher(); + Ok(Self { value, glob }) + } + + /// Get the inner value of this `PathPattern` as a string. + pub fn value(&self) -> &str { + &self.value + } + + fn matches_target_name(&self, target_name: &TargetName) -> bool { + self.glob.is_match(target_name.resolved()) + } +} + +impl FromStr for PathPattern { + type Err = Error; + + fn from_str(s: &str) -> Result<Self> { + PathPattern::new(s) + } +} + +impl PartialEq for PathPattern { + fn eq(&self, other: &Self) -> bool { + PartialEq::eq(&self.value, &other.value) + } +} + +impl Serialize for PathPattern { + fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(self.value().as_ref()) + } +} + +impl<'de> Deserialize<'de> for PathPattern { + fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let s = <String>::deserialize(deserializer)?; + PathPattern::new(s).map_err(|e| D::Error::custom(format!("{}", e))) + } +} + +/// The first characters found in the string representation of a sha256 digest. This can be used for +/// randomly sharding a repository. See [`PathSet::PathHashPrefixes`] for the description of how this +/// is used. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +pub struct PathHashPrefix(String); + +impl PathHashPrefix { + /// Create a new, valid `PathHashPrefix`. + pub fn new<S: Into<String>>(value: S) -> Result<Self> { + // In case we choose to reject some of these in the future, we return a result. For now this + // will always succeed. + Ok(PathHashPrefix(value.into())) + } + + /// Get the inner value of this `PathHashPrefix` as a string.
+ pub fn value(&self) -> &str { + &self.0 + } + + fn matches_target_name(&self, target_name: &TargetName) -> bool { + let target_name_digest = + digest(&SHA256, target_name.resolved().as_bytes()).encode_hex::(); + target_name_digest.starts_with(self.value()) + } +} + +impl FromStr for PathHashPrefix { + type Err = Error; + + fn from_str(s: &str) -> Result { + PathHashPrefix::new(s) + } } impl PathSet { - /// Given a target string determines if paths match - fn matched_target(&self, target: &str) -> bool { + /// Given a `target_name`, returns whether or not this `PathSet` contains a pattern or hash + /// prefix that matches. + fn matches_target_name(&self, target_name: &TargetName) -> bool { match self { Self::Paths(paths) => { for path in paths { - if Self::matched_path(path, target) { + if path.matches_target_name(target_name) { return true; } } } Self::PathHashPrefixes(path_prefixes) => { - for path in path_prefixes { - if Self::matched_prefix(path, target) { + for prefix in path_prefixes { + if prefix.matches_target_name(target_name) { return true; } } @@ -901,30 +1039,6 @@ impl PathSet { } false } - - /// Given a path hash prefix and a target path determines if target is delegated by prefix - fn matched_prefix(prefix: &str, target: &str) -> bool { - let temp_target = target.to_string(); - let hash = digest(&SHA256, temp_target.as_bytes()); - hash.as_ref().starts_with(prefix.as_bytes()) - } - - /// Given a shell style wildcard path determines if target matches the path - fn matched_path(wildcardpath: &str, target: &str) -> bool { - let glob = if let Ok(glob) = Glob::new(wildcardpath) { - glob.compile_matcher() - } else { - return false; - }; - glob.is_match(target) - } - - /// Returns a Vec representation of the `PathSet` - pub fn vec(&self) -> &Vec { - match self { - PathSet::Paths(x) | PathSet::PathHashPrefixes(x) => x, - } - } } impl Delegations { @@ -937,32 +1051,15 @@ impl Delegations { } /// Determines if target passes pathset specific matching - pub fn target_is_delegated(&self, target: &str) -> bool { + pub fn target_is_delegated(&self, target: &TargetName) -> bool { for role in &self.roles { - if role.paths.matched_target(target) { + if role.paths.matches_target_name(target) { return true; } } false } - /// Ensures that all delegated paths are allowed to be delegated - pub fn verify_paths(&self) -> Result<()> { - for sub_role in &self.roles { - let pathset = match &sub_role.paths { - PathSet::Paths(paths) | PathSet::PathHashPrefixes(paths) => paths, - }; - for path in pathset { - if !self.target_is_delegated(path) { - return Err(Error::UnmatchedPath { - child: path.to_string(), - }); - } - } - } - Ok(()) - } - /// Given an object/key that impls Sign, return the corresponding /// key ID from Delegation pub fn key_id(&self, key_pair: &dyn Sign) -> Option> { @@ -984,21 +1081,6 @@ impl DelegatedRole { _extra: HashMap::new(), } } - - /// Verify that paths can be delegated by this role - pub fn verify_paths(&self, paths: &PathSet) -> Result<()> { - let paths = match paths { - PathSet::Paths(x) | PathSet::PathHashPrefixes(x) => x, - }; - for path in paths { - if !self.paths.matched_target(path) { - return Err(Error::UnmatchedPath { - child: path.to_string(), - }); - } - } - Ok(()) - } } // =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= @@ -1087,3 +1169,102 @@ impl Role for Timestamp { "timestamp.json".to_string() } } + +#[test] +fn targets_iter_and_map_test() { + use maplit::hashmap; + + // Create a dummy Target object. 
+ let nothing = Target { + length: 0, + hashes: Hashes { + sha256: [0u8].to_vec().into(), + _extra: Default::default(), + }, + custom: Default::default(), + _extra: Default::default(), + }; + + // Create a hierarchy of targets/delegations: a -> b -> c + let c_role = DelegatedRole { + name: "c-role".to_string(), + keyids: vec![], + threshold: NonZeroU64::new(1).unwrap(), + paths: PathSet::Paths(vec![PathPattern::new("*").unwrap()]), + terminating: false, + targets: Some(Signed { + signed: Targets { + spec_version: "".to_string(), + version: NonZeroU64::new(1).unwrap(), + expires: Utc::now(), + targets: hashmap! { + TargetName::new("c.txt").unwrap() => nothing.clone(), + }, + delegations: None, + _extra: Default::default(), + }, + signatures: vec![], + }), + }; + let b_delegations = Delegations { + keys: Default::default(), + roles: vec![c_role], + }; + let b_role = DelegatedRole { + name: "b-role".to_string(), + keyids: vec![], + threshold: NonZeroU64::new(1).unwrap(), + paths: PathSet::Paths(vec![PathPattern::new("*").unwrap()]), + terminating: false, + targets: Some(Signed { + signed: Targets { + spec_version: "".to_string(), + version: NonZeroU64::new(1).unwrap(), + expires: Utc::now(), + targets: hashmap! { + TargetName::new("b.txt").unwrap() => nothing.clone(), + }, + delegations: Some(b_delegations), + _extra: Default::default(), + }, + signatures: vec![], + }), + }; + let a_delegations = Delegations { + keys: Default::default(), + roles: vec![b_role], + }; + let a = Targets { + spec_version: "".to_string(), + version: NonZeroU64::new(1).unwrap(), + expires: Utc::now(), + targets: hashmap! { + TargetName::new("a.txt").unwrap() => nothing.clone(), + }, + delegations: Some(a_delegations), + _extra: Default::default(), + }; + + // Assert that targets_iter is recursive and thus has a.txt, b.txt and c.txt + assert!(a + .targets_iter() + .map(|(key, _)| key) + .find(|&item| item.raw() == "a.txt") + .is_some()); + assert!(a + .targets_iter() + .map(|(key, _)| key) + .find(|&item| item.raw() == "b.txt") + .is_some()); + assert!(a + .targets_iter() + .map(|(key, _)| key) + .find(|&item| item.raw() == "c.txt") + .is_some()); + + // Assert that targets_map is also recursive + let map = a.targets_map(); + assert!(map.contains_key(&TargetName::new("a.txt").unwrap())); + assert!(map.contains_key(&TargetName::new("b.txt").unwrap())); + assert!(map.contains_key(&TargetName::new("c.txt").unwrap())); +} diff --git a/tough/src/target_name.rs b/tough/src/target_name.rs new file mode 100644 index 00000000..3e05fe4d --- /dev/null +++ b/tough/src/target_name.rs @@ -0,0 +1,299 @@ +use crate::error::{self, Result}; +use path_absolutize::Absolutize; +use serde::de::Error; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use snafu::{ensure, OptionExt, ResultExt}; +use std::convert::TryFrom; +use std::path::PathBuf; +use std::str::FromStr; + +/// Represents the name of a target in the repository. Path-like constructs are resolved (e.g. +/// `foo/../bar` becomes `bar`). Certain unsafe names are rejected when constructing a `TargetName`. +/// Unsafe names include: +/// - Anything that resolves to an empty string +/// - Anything that resolves to `/` +/// +/// `TargetName` intentionally does not impl String-like traits so that we are forced to choose +/// between the resolved name and the raw/original name when we use it as a string. +/// +/// Note that `Serialize` writes the `raw`, un-resolved name. You should not use the results of +/// serialization to form file paths. 
+/// +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct TargetName { + /// The name assigned to the target by the repository user. + raw: String, + /// If the `raw` name is path-like, and it resolves to a simpler path construct, then the + /// resolved name is stored here. (As a CPU optimization). + resolved: Option<String>, +} + +impl TargetName { + /// Construct a new `TargetName`. Unsafe names will return an error. + pub fn new<S: Into<String>>(raw: S) -> Result<Self> { + let raw = raw.into(); + let resolved = clean_name(&raw)?; + if raw == resolved { + Ok(Self { + raw, + resolved: None, + }) + } else { + Ok(Self { + raw, + resolved: Some(resolved), + }) + } + } + + /// Get the original, unchanged name (i.e. which might be something like `foo/../bar` instead of + /// `bar`). + pub fn raw(&self) -> &str { + &self.raw + } + + /// Get the resolved name (i.e. which would be `bar` instead of `foo/../bar`). + pub fn resolved(&self) -> &str { + match &self.resolved { + None => self.raw(), + Some(resolved) => resolved, + } + } +} + +impl FromStr for TargetName { + type Err = crate::error::Error; + + fn from_str(s: &str) -> Result<Self> { + Self::new(s) + } +} + +impl TryFrom<String> for TargetName { + type Error = crate::error::Error; + + fn try_from(value: String) -> Result<Self> { + TargetName::new(value) + } +} + +impl TryFrom<&str> for TargetName { + type Error = crate::error::Error; + + fn try_from(value: &str) -> Result<Self> { + TargetName::new(value) + } +} + +impl Serialize for TargetName { + fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(self.raw()) + } +} + +impl<'de> Deserialize<'de> for TargetName { + fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let s = <String>::deserialize(deserializer)?; + TargetName::new(s).map_err(|e| D::Error::custom(format!("{}", e))) + } +} + +// Resolves path-like constructs. e.g. `foo/../bar` becomes `bar`. +fn clean_name(name: &str) -> Result<String> { + // This causes something to panic, so we check for it early. + ensure!(name != "..", error::UnsafeTargetNameDotDot); + + // Seems like bad things could happen if the target filename is the empty string. + ensure!(!name.is_empty(), error::UnsafeTargetNameEmpty { name }); + + // If our name starts with absolute, then we need to remember this so we can restore it later. + let name_path = PathBuf::from(name); + let absolute = name_path.is_absolute(); + + let clean = { + let proposed = name_path + .absolutize_from(&PathBuf::from("/")) + .context(error::TargetNameResolve { name })?; + + // `absolutize_from` will give us a path that starts with `/`, so we remove it if the + // original name did not start with `/` + if absolute { + // If `name` started with `/`, then we have nothing left to do because absolutize_from + // returns a rooted path. + proposed.to_path_buf() + } else { + let mut components = proposed.components(); + // If the original name did not start with `/`, we need to remove the leading slash + // here because absolutize_from will return a rooted path. + let first_component = components + .next() + // If this error occurs then there is a bug or behavior change in absolutize_from. + .context(error::TargetNameComponentsEmpty { name })? + .as_os_str(); + + // If the first component isn't `/` then there is a bug or behavior change in + // absolutize_from.
+ ensure!( + first_component == "/", + error::TargetNameRootMissing { name } + ); + + components.as_path().to_owned() + } + }; + + let final_name = clean + .as_os_str() + .to_str() + .context(error::PathUtf8 { path: &clean })? + .to_string(); + + // Check again to make sure we didn't end up with an empty string. + ensure!( + !final_name.is_empty(), + error::UnsafeTargetNameEmpty { name } + ); + + ensure!(final_name != "/", error::UnsafeTargetNameSlash { name }); + + Ok(final_name) +} + +#[test] +fn simple_1() { + let name = "/absolute/path/is/ok.txt"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn simple_2() { + let name = "relative/path/is/ok.txt"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn simple_3() { + let name = "not-path-like.txt"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_1() { + let name = "/this/../is/ok.txt"; + let actual = clean_name(name).unwrap(); + let expected = "/is/ok.txt"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_2() { + let name = "../x"; + let actual = clean_name(name).unwrap(); + let expected = "x"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_3() { + let name = "../../x"; + let actual = clean_name(name).unwrap(); + let expected = "x"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_4() { + let name = "/../x"; + let actual = clean_name(name).unwrap(); + let expected = "/x"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_5() { + let name = "/../../x"; + let actual = clean_name(name).unwrap(); + let expected = "/x"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_6() { + let name = "/this/../../../../is/ok.txt"; + let actual = clean_name(name).unwrap(); + let expected = "/is/ok.txt"; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_7() { + let name = "foo"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn resolved_8() { + let name = "/foo"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn uncleaned_1() { + let name = r#"~/\.\."#; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn uncleaned_2() { + let name = r#"funky\/\.\.\/name"#; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn uncleaned_3() { + let name = "/weird/\\..\\/path"; + let actual = clean_name(name).unwrap(); + let expected = name; + assert_eq!(expected, &actual); +} + +#[test] +fn bad_1() { + let name = ".."; + let error = clean_name(name).err().unwrap(); + assert!(matches!(error, error::Error::UnsafeTargetNameDotDot { .. })); +} + +#[test] +fn bad_2() { + let name = "../"; + let error = clean_name(name).err().unwrap(); + assert!(matches!(error, error::Error::UnsafeTargetNameEmpty { .. })); +} + +#[test] +fn bad_3() { + let name = "/.."; + let error = clean_name(name).err().unwrap(); + assert!(matches!(error, error::Error::UnsafeTargetNameSlash { .. 
diff --git a/tough/src/transport.rs b/tough/src/transport.rs index 9b621997..2f377f49 100644 --- a/tough/src/transport.rs +++ b/tough/src/transport.rs @@ -4,6 +4,7 @@ use dyn_clone::DynClone; use std::error::Error; use std::fmt::{Debug, Display, Formatter}; use std::io::{ErrorKind, Read}; +use std::path::PathBuf; use url::Url; /// A trait to abstract over the method/protocol by which files are obtained. @@ -148,14 +149,10 @@ impl Transport for FilesystemTransport { )); } - // Convert the file URL into a file path - let file_path = &url.to_file_path().map_err(|_e| { - TransportError::new_with_cause( - TransportErrorKind::Other, - &url, - "unable to get filepath from URL".to_string(), - ) - })?; + // Convert the file URL into a file path. We need to use url.path() and not + // url.to_file_path() because to_file_path will decode the percent encoding which could + // restore path traversal characters. + let file_path = PathBuf::from(url.path()); // And open the file let f = std::fs::File::open(file_path).map_err(|e| {
diff --git a/tough/tests/data/consistent-snapshots/metadata/1.root.json b/tough/tests/data/consistent-snapshots/metadata/1.root.json new file mode 100644 index 00000000..de749779 --- /dev/null +++ b/tough/tests/data/consistent-snapshots/metadata/1.root.json @@ -0,0 +1,50 @@ +{ + "signed": { + "_type": "root", + "spec_version": "1.0.0", + "consistent_snapshot": true, + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "keys": { + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAnL6u6Q9Q6pg1G5020a83\nGlH/aFUO0PQ5leIpwWL8kWgpaWuUG7oRlOUG2/4cwN5FCvJJGXqU5AtSKq2fZ42J\n5XR9QMip4Pg0Q6mE8XCvAXAoMnkWSchdzgT2GoEntaOeRRTCUGb/DsVoxsVXjV6m\nFaRMx7nh8ggshMWgTYgTUDK+CSIBCcBWapCFq1BrM60XZmGTqeAuHSHaUUuF9G3b\ngOflH5L9IpQkaHWbJtGvyKLr53mhWO2r8BPR3+CtNZojAnkwmu4lA94k8C7TLMdc\nutzU4OzODe9UPERc33lRv8DBgsH3F077ZQwv/ikZXWSlACTDWZwenncCEwqdeDd4\n+q2AHyqxRN7bUAh57mUN+kFd3SS/4T44sfBrJw6N4JV/mE+/YfRLWtpIKIsXnBCb\nrC+dt96Vqz6g6eVVvqPwhOCSKcYsmp/iS6qwVn0Dq2SCrGG1FTmBjeA9ZkcjZhUG\nQEMyMNhoS+U2Nx5oIEIq2kREpuu+KsBSTUaOgR07WNUxAgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": { + "targets": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "root": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig":
"3550ab1882e31e18748c85d4f259bf2f84961affb7662988f862bf20a97785564937342c6e64caeb3855bf51bc01c2235cd442a5c501d2d002aa4147a80116a6c8bd26f6cf5c668500651f175dfacf71a33614abd0ab9d4fa226b14a301a4b5466e208bfd910162a133aa42716bfe8eac0a5a89da8b06faa2efc8e0c8003ba57828df23eda75b60db2c2e665da7639f9005fa6c320627aabcbd0ee13acf7e4247c0a4cc55dcb2a1a6277cad7a4a43addd5aa9c5a403cc80dfd0a2c09370bfd124bceda0e576d5ec3a62784109f9bd636c4f7e419aa742497a65fcfaeb81c75fe186094d4602205ae50f516fe351bb9698959fcc32f459887c8f40b40bcaddca0327d20223d32caa5f23f642177d0f76374fb9c2244187b8cf27ee050dec84d77400df8b957faadb9a93f7729f651832ef103603d12a50daa1606928ceb75e51cd918c6075ec1d7a761a45923a358e20932ba8c8313778643c91ac357d8f9eb0459a4ba305709b8edd5830956bf65204b9abfbca5f78474ba044efebe32d7137a" + } + ] +} diff --git a/tough/tests/data/consistent-snapshots/metadata/1.snapshot.json b/tough/tests/data/consistent-snapshots/metadata/1.snapshot.json new file mode 100644 index 00000000..8d9389fa --- /dev/null +++ b/tough/tests/data/consistent-snapshots/metadata/1.snapshot.json @@ -0,0 +1,23 @@ +{ + "signed": { + "_type": "snapshot", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "meta": { + "targets.json": { + "length": 1458, + "hashes": { + "sha256": "99b6979f3593b8fd0a9afd3ee3583e0565f3ecf399823e9c90557771c5a58b66" + }, + "version": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "971b8c5faa0edcc1de2e649452f3d8657dacc5d96746ac17f2b7c6ce901be2049ba6abe758f79a2cd1b09f88d55163271c2586c9adb7f96e3b82ac1173bd406190f14090e2270274b8c7b0ec91de595ba2853254d6a6322d862e0f47f77a679ffcd56ed4f54da0cb472088e7c8eb39ad0b7538f02544112e5cf5a2c5fb8e9e9b2f3c53ae9f0238437b62efc4f97b3d23b6ba7fd2b001bd29ef0b668ac66cf0e30697d7dd608f9d97d4317d2ea9c2c766978429ff7c958b05ae841fe0a7ffac9c60c7e48b3cea9eca1f845c6633b5c7aab006e78431817efbce9725cf3674e2591276c9e77b70b5787be180e9a204fd21ae7be30ec51b2d80a2f6c7b2daeb41c869c71b4a8662f1ad3e131de2cc6f18755fd1623be834e5735871c37151c305e8a136a8676f0b514dd396a329784109a0b4dbc8f97f73dd9aa7fbc3962af516088e172afacc13efa35b3de13d059cff8afcc9accda7508a8c3542ec0e918f8a546dc60257abc6389b50746a1383007b82ac09c0ddb833cf38f88ec3fc287c1ab7" + } + ] +} diff --git a/tough/tests/data/consistent-snapshots/metadata/1.targets.json b/tough/tests/data/consistent-snapshots/metadata/1.targets.json new file mode 100644 index 00000000..44226304 --- /dev/null +++ b/tough/tests/data/consistent-snapshots/metadata/1.targets.json @@ -0,0 +1,32 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "targets": { + "data1.txt": { + "length": 14, + "hashes": { + "sha256": "5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1" + } + }, + "data2.txt": { + "length": 14, + "hashes": { + "sha256": "732b0c04a45c1296a7adf26814d2622c288e5ae1ce0cd791da84aea5a745081c" + } + } + }, + "delegations": { + "keys": {}, + "roles": [] + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": 
"585197fad043d64952af6139de776e3cc7eb95c4b1078a21b5da1d4badec3f36564ff7da2281355342e8e8dc7238170c7155130fdd54d2176447ca1646f21decbdbb513e4104d491001739a7a068b402360b1d6cf82b432b2441911891fcba817b698ec6a1127f48a3c940abe44bbb747942c93af75d08d527d3da0354935fa0e6b2576c177f5cc746e01ce8ade005558f406ba3b5f79e03d51ac7417ff416b0f9c7ddce2663aa388de8451480a97373b4cd653b25e026ec9d9f1a8a9923bb6bf43aa39765ea01296b3bf3f9879d478165dcea5706f94aff1e8ef6505342128cd6eea78fe51e030e135d3fc322dfd59674dee76abc8881cc0933489e686c37add6bc85cbbcf549f61ea4d531a0c24497951a85dff50a59b810b1ea2511bad84202cb5ee4bc67fc7716595192192a55c97651a6e5f1e855df587a4e0c7d31c2e530b8fe32cee627e638ec7e76069c0174562418007479448f7d05c0074d125f77c0fc440df08f37f2a12e5e16174580e3af8ee0252c30b82c7eedda7db137ac96" + } + ] +} diff --git a/tough/tests/data/consistent-snapshots/metadata/timestamp.json b/tough/tests/data/consistent-snapshots/metadata/timestamp.json new file mode 100644 index 00000000..ac61d6d6 --- /dev/null +++ b/tough/tests/data/consistent-snapshots/metadata/timestamp.json @@ -0,0 +1,23 @@ +{ + "signed": { + "_type": "timestamp", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "meta": { + "snapshot.json": { + "length": 1250, + "hashes": { + "sha256": "9717070e2c5a2d1757e11ca830863897eb2efd8b0b3483c72d5d2010bcc0d12e" + }, + "version": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "31e6d6455a866b27a79108e52af4800533e0f6fc34f1667caf6a624823d02bb884226c5e8403b87b3af64622c63214789a8de9098db7df1ffc34fc89cdac8cbcb99885d0ca4a8b37d214a6d9ebbe3a68d11faeb28316372573ab1e2e4bce7f7f8d52d397a8d3c7e4813864f33eca3b726c66f4fa3c48acbd5e6b870ac03b07d5a27b7ea6762a6672eb21fdccf13cef9f899fbcf7b821b071f75f1bc6f963d94545bb6979bfdd1482daab2454fa1c9d8880f53ae096c8e3e0797a1152218738a8eee3615c11328a14be3f4c81c55f40cf6a4217797feb85f42d1e2e007460982c7d423d83d7d4129733aa2fabe9756f415f03bf2d31d49d7fde58b0865dd4386b75235c315335710ed159cd9407b1633da8406bd3c5b3c8bfec9f9a3636ceb69d066448ddf19910f3844cb9a7f63c8d84abecd188f8c354e6cf6347c0e37129ff969b4467a5a77eb4366f8f33e30aaaf190d21c99f6a277e069b02838f6e5203012fe4f88bf6e17c1bd3e3d8efe8690bfd341b883e6e41c9351d0d9b8a3630b0a" + } + ] +} diff --git a/tough/tests/data/consistent-snapshots/targets/5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1.data1.txt b/tough/tests/data/consistent-snapshots/targets/5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1.data1.txt new file mode 100644 index 00000000..725c930e --- /dev/null +++ b/tough/tests/data/consistent-snapshots/targets/5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1.data1.txt @@ -0,0 +1,4 @@ +123 +456 +789 +0 diff --git a/tough/tests/data/consistent-snapshots/targets/732b0c04a45c1296a7adf26814d2622c288e5ae1ce0cd791da84aea5a745081c.data2.txt b/tough/tests/data/consistent-snapshots/targets/732b0c04a45c1296a7adf26814d2622c288e5ae1ce0cd791da84aea5a745081c.data2.txt new file mode 100644 index 00000000..5fbdc068 --- /dev/null +++ b/tough/tests/data/consistent-snapshots/targets/732b0c04a45c1296a7adf26814d2622c288e5ae1ce0cd791da84aea5a745081c.data2.txt @@ -0,0 +1,4 @@ +abc +def +hij +k diff --git a/tough/tests/data/dubious-role-names/metadata/%E1%9A%A9%20os%2C%20%E1%9A%B1%20rad%2C%20%E1%9A%B3%20cen%2C%20%E1%9A%B7%20gyfu%2C%20%E1%9A%B9%20%C6%BFynn%2C%20%E1%9A%BB%20h%C3%A6gl%2C%20....json 
b/tough/tests/data/dubious-role-names/metadata/%E1%9A%A9%20os%2C%20%E1%9A%B1%20rad%2C%20%E1%9A%B3%20cen%2C%20%E1%9A%B7%20gyfu%2C%20%E1%9A%B9%20%C6%BFynn%2C%20%E1%9A%BB%20h%C3%A6gl%2C%20....json new file mode 100644 index 00000000..046d0bb9 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/%E1%9A%A9%20os%2C%20%E1%9A%B1%20rad%2C%20%E1%9A%B3%20cen%2C%20%E1%9A%B7%20gyfu%2C%20%E1%9A%B9%20%C6%BFynn%2C%20%E1%9A%BB%20h%C3%A6gl%2C%20....json @@ -0,0 +1,27 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 3, + "expires": "3021-01-27T00:56:48.450414Z", + "targets": {}, + "delegations": { + "keys": { + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0Q5xtn047yiCQlsZELR\nnUbk5tnIXEresMBpxu9NC5t2ywjhA/EQ22aX6Sy85PAvSUlE+DBIbLmm5o0EPbF8\n7e6EkZxj5Nz3O/UYMCgqpLE1bNIFSZzQNXOAfYqWsTE9rIQpJnZfpmPSruZ95xdN\nZsXh6rCdM3HfSpID+hE3Mq97dehdoW18DGxnorkuzTTLD9oA+Wz+Ctq1wpmBKraH\npkI8Q4QQ0ej74dEgEXxdLlAjFnEWmU/yTwUoa5hXtYcwq7MB/haT9DkUmeI4Wyk7\n5gamMun1tFgnEXso+YePUQg2ySMam0/nWVCbVMqVBveWk+TawT5Z8SytNKXQTCNS\nswIDAQAB\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": [] + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "4aba04bd0217a205b1d799a1b7b7163b46328e2192cc63a992b04858f5f6b4709dbe585f916141ba7ffe15f07915ae9d853a79c106dadb0782fd368842226c88b27de15f3a12e42a83c5e1332cb889dba5b833976c42f4007cc3606ef3a84851c92318f5fde2b1e8a6c99e8dc22f8651703f16dc7d220013cb9da3226a9836e1143670208a45e91b6b405d24667d355637b2b0f8ada410f1e819443102a6b270e23c41222c001399142a8abb3bfe63dc6d313b7b345dbb918f71c5c3ad6a02295fe224387175350c87c9e356740aafbad09fceae6354acbcc214cd504607a6867848f5460cea87e1ba73945dba2b155f08b61b58d6ee55bdebc2327ac5e854c3" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/%F0%9F%8D%BA%2F30.json b/tough/tests/data/dubious-role-names/metadata/%F0%9F%8D%BA%2F30.json new file mode 100644 index 00000000..f750bc59 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/%F0%9F%8D%BA%2F30.json @@ -0,0 +1,27 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 3, + "expires": "3021-01-27T00:56:42.752354Z", + "targets": {}, + "delegations": { + "keys": { + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0Q5xtn047yiCQlsZELR\nnUbk5tnIXEresMBpxu9NC5t2ywjhA/EQ22aX6Sy85PAvSUlE+DBIbLmm5o0EPbF8\n7e6EkZxj5Nz3O/UYMCgqpLE1bNIFSZzQNXOAfYqWsTE9rIQpJnZfpmPSruZ95xdN\nZsXh6rCdM3HfSpID+hE3Mq97dehdoW18DGxnorkuzTTLD9oA+Wz+Ctq1wpmBKraH\npkI8Q4QQ0ej74dEgEXxdLlAjFnEWmU/yTwUoa5hXtYcwq7MB/haT9DkUmeI4Wyk7\n5gamMun1tFgnEXso+YePUQg2ySMam0/nWVCbVMqVBveWk+TawT5Z8SytNKXQTCNS\nswIDAQAB\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": [] + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": 
"2fb047a4d9263ca4e15b7767010d00ef1b5f71cac307b4299f3520f15ec715f1a7bd62dd2da4d4f3e8cf3e8b961ed022c937f5276db1f2769b6d0ac04879ecf2c3450bb9e4bb2905b9a199f1378493d6e14837fb911b3de998c5c5a4d0b5aaa16fe3005e44833ad19777af105ceb7840b6f1e63736c50a1e89cb8a88bb0bce800dffd3f5d00d01aec53c4057cd8de03be11a212b3680646e819f6d3ef9c4fd5de00437e61ee32fe78a1d331d5e6211a315fc8f622f2e7716738ba06e5a0fac1c932f5805ae60cca4f1651b2b11318c223c3205e55a69ff92d0027144843ca77f7cdf0cbf8a820f6e3f88f0896cf7ec2b896d63dcc69466b21f62c76c2ff0a39f" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/..%2F..%2Fpath%2Flike%2Fdubious.json b/tough/tests/data/dubious-role-names/metadata/..%2F..%2Fpath%2Flike%2Fdubious.json new file mode 100644 index 00000000..f6919c55 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/..%2F..%2Fpath%2Flike%2Fdubious.json @@ -0,0 +1,27 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 3, + "expires": "3021-01-27T00:56:35.229228Z", + "targets": {}, + "delegations": { + "keys": { + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0Q5xtn047yiCQlsZELR\nnUbk5tnIXEresMBpxu9NC5t2ywjhA/EQ22aX6Sy85PAvSUlE+DBIbLmm5o0EPbF8\n7e6EkZxj5Nz3O/UYMCgqpLE1bNIFSZzQNXOAfYqWsTE9rIQpJnZfpmPSruZ95xdN\nZsXh6rCdM3HfSpID+hE3Mq97dehdoW18DGxnorkuzTTLD9oA+Wz+Ctq1wpmBKraH\npkI8Q4QQ0ej74dEgEXxdLlAjFnEWmU/yTwUoa5hXtYcwq7MB/haT9DkUmeI4Wyk7\n5gamMun1tFgnEXso+YePUQg2ySMam0/nWVCbVMqVBveWk+TawT5Z8SytNKXQTCNS\nswIDAQAB\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": [] + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "60dfadc88922c7301b8a83cdacfe7ac490fdf2b9d6049ad931507f078b9c5a31ce41130c440798ed3755809702ea9d5243d41de9edbcf7ce8501feb5b9ad607f00814d5ee0d94e216d74cdbbbc9383dc2639e602393c8b4adf73b3e18423ea5e6da713c59b97fd0c719e863a406cff0f1cf7938d44bf292a1dc20b0c6a85e17ae77060a8fac84d34d821e0ed0fc4a77143d7ec39afc59b305ca4633e012485d38aef69fb780937a1f7dfbccbee988d2370758e8e8765796c9d85db6fc794a8d792a184d49e60a0c543abb6fc792cc85ba4c4c59a38f3199c91778d2d8110a1435de864d45d778a2809479367317951ce257c7dc245fa61300487633363566067" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/1.root.json b/tough/tests/data/dubious-role-names/metadata/1.root.json new file mode 100644 index 00000000..9b113208 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/1.root.json @@ -0,0 +1,50 @@ +{ + "signed": { + "_type": "root", + "spec_version": "1.0.0", + "consistent_snapshot": false, + "version": 1, + "expires": "3021-01-27T00:56:15Z", + "keys": { + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0Q5xtn047yiCQlsZELR\nnUbk5tnIXEresMBpxu9NC5t2ywjhA/EQ22aX6Sy85PAvSUlE+DBIbLmm5o0EPbF8\n7e6EkZxj5Nz3O/UYMCgqpLE1bNIFSZzQNXOAfYqWsTE9rIQpJnZfpmPSruZ95xdN\nZsXh6rCdM3HfSpID+hE3Mq97dehdoW18DGxnorkuzTTLD9oA+Wz+Ctq1wpmBKraH\npkI8Q4QQ0ej74dEgEXxdLlAjFnEWmU/yTwUoa5hXtYcwq7MB/haT9DkUmeI4Wyk7\n5gamMun1tFgnEXso+YePUQg2ySMam0/nWVCbVMqVBveWk+TawT5Z8SytNKXQTCNS\nswIDAQAB\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": { + "timestamp": { + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + 
"0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1 + }, + "root": { + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1 + } + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "a24efe8b7c35f6539cbc726030b295e01cb4a045e8b1d3b57af212aa51346ff5523e5a4282f704ecdcb2d5dc28ea710323d675b406bf85eb71b9b17678c43530357baac87e4bac45cc425fa2a06df8fdb29b42f73b627e2ea8131d0a2ac27d4acb27195fc4633596cd20c494ddc63fdbd6d7edd7fc14060018acf24d22afc5d105310ec4a8e620daee142985791859501f7885d98705354ee55d176449c2594815a5251872b1a95209dcea0056f982c26722183e36817f040bc6981491524bff711efe9ad403d7645b47e408585b6ac3b9778231a02c940d183d22668eb54d4e74daa741774c57450c790f823407c8695ad7ded0bd75e035f93f25e9710a5725" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/snapshot.json b/tough/tests/data/dubious-role-names/metadata/snapshot.json new file mode 100644 index 00000000..85a816f6 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/snapshot.json @@ -0,0 +1,44 @@ +{ + "signed": { + "_type": "snapshot", + "spec_version": "1.0.0", + "version": 2, + "expires": "3021-01-27T00:57:00.468905Z", + "meta": { + "ᚩ os, ᚱ rad, ᚳ cen, ᚷ gyfu, ᚹ ƿynn, ᚻ hægl, ....json": { + "length": 1548, + "hashes": { + "sha256": "1f60219a41e50038468ab84e27a66e9c08674c6e064210acffe434a76de19dfb" + }, + "version": 3 + }, + "targets.json": { + "length": 2775, + "hashes": { + "sha256": "e94a1efb2c7fb59b80d4d88a4b2a1bd8955d0472105befb279f3dbf80c632255" + }, + "version": 2 + }, + "../../path/like/dubious.json": { + "length": 1548, + "hashes": { + "sha256": "67050f56e6d3d79a8393097b2f668fe1231299f97a015c81947d5caa06577a94" + }, + "version": 3 + }, + "🍺/30.json": { + "length": 1548, + "hashes": { + "sha256": "1909ba25ca102c175566b93e6b1b6d3fb55f2b5a1ef88d50bcabd3064eb7fcc6" + }, + "version": 3 + } + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "0d0a5ff0a6a80cf574657dcaac159675734587493cf91fb54302bb33a99ef1841a4fbf7dfdbb10a7c104d5b9830a15f742898a2e656dcab1956ddeaa11cc1eae7a8413681a7de24a51342a18d77c843536c6c40aa2f7ce935a769ec853da70589432a9dcd95d5a130324bfb88d8be951a706d99ef76cf1f2677a2dde5d13ea75067d80aecafa91d6f2509a5e6a93c1b4649d101117c24000f4909ff30836409da975b6222b5e117692d96bec5c189371915b555d5f07588acd6c73eb0a94d7cff54080e547f1e4003b36f8cf37f388f88473011ea034ced32d998c1e9210a919c36003991c1d5284e4a6225a3dc4e969a3f172990c94fecf19b19a8feaad7fd5" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/targets.json b/tough/tests/data/dubious-role-names/metadata/targets.json new file mode 100644 index 00000000..f195b3d1 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/targets.json @@ -0,0 +1,74 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 2, + "expires": "3021-01-27T00:57:00.468918Z", + "targets": { + "2.txt": { + "length": 2, + "hashes": { + "sha256": "53c234e5e8472b6ac51c1ae1cab3fe06fad053beb8ebfd8977b010655bfdd3c3" + } + }, + "1.txt": { + "length": 2, + "hashes": { + "sha256": "4355a46b19d348dc2f57c046f8ef63d4538ebb936000f3c9ee954a27460dd865" + } + } + }, + "delegations": { + "keys": { + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b": { + "keytype": "rsa", + "keyval": { + "public": 
"-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0Q5xtn047yiCQlsZELR\nnUbk5tnIXEresMBpxu9NC5t2ywjhA/EQ22aX6Sy85PAvSUlE+DBIbLmm5o0EPbF8\n7e6EkZxj5Nz3O/UYMCgqpLE1bNIFSZzQNXOAfYqWsTE9rIQpJnZfpmPSruZ95xdN\nZsXh6rCdM3HfSpID+hE3Mq97dehdoW18DGxnorkuzTTLD9oA+Wz+Ctq1wpmBKraH\npkI8Q4QQ0ej74dEgEXxdLlAjFnEWmU/yTwUoa5hXtYcwq7MB/haT9DkUmeI4Wyk7\n5gamMun1tFgnEXso+YePUQg2ySMam0/nWVCbVMqVBveWk+TawT5Z8SytNKXQTCNS\nswIDAQAB\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": [ + { + "name": "../../path/like/dubious", + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1, + "paths": [ + "foo" + ], + "terminating": false + }, + { + "name": "🍺/30", + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1, + "paths": [ + "foo" + ], + "terminating": false + }, + { + "name": "ᚩ os, ᚱ rad, ᚳ cen, ᚷ gyfu, ᚹ ƿynn, ᚻ hægl, ...", + "keyids": [ + "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b" + ], + "threshold": 1, + "paths": [ + "foo" + ], + "terminating": false + } + ] + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "3bd8272207c4733e070795dce7e479eaae40c39443837f2bca17824a80ea62d8be74b23f2f7336bb310beb09199cc2120f934a1b5e34c3a296604825e7145d723b083aade5114c2ec1a4b10c070e386b6b9b95ae828e2a5072ce4b21f3f06129837965be17661c4295a4086c917da1bff443b29e877c3bcb8fc72a3c688c546718deb11b5e354a5de8ff46f568d75e26af870c5b606eda946903a1abc05022b9e12c2b0c2943daf04fa0ad98543ee8326fabb86720becce985e924cb308a84e4734562080ecf09e336c423b2e38701d4bdd0df56ffae8d7055897a73b2287b6dc9eec8cb1e3726ee563ac8de1de3b13ad2cf7206cc7f5e3e1d02d662a2bb71eb" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/metadata/timestamp.json b/tough/tests/data/dubious-role-names/metadata/timestamp.json new file mode 100644 index 00000000..59912b03 --- /dev/null +++ b/tough/tests/data/dubious-role-names/metadata/timestamp.json @@ -0,0 +1,23 @@ +{ + "signed": { + "_type": "timestamp", + "spec_version": "1.0.0", + "version": 2, + "expires": "3021-01-27T00:57:00.468924Z", + "meta": { + "snapshot.json": { + "length": 1659, + "hashes": { + "sha256": "53cfa83b9ed82fee5da8db1d05efcd5555f3cbfc2624a63af1e2f93f4813bd2a" + }, + "version": 2 + } + } + }, + "signatures": [ + { + "keyid": "0f432a9dfff85a943dd7a4fb7ff3221ad6495ef81d0342cb74094c208aa8934b", + "sig": "925d2691a908b3a0bee425e04ba35e6616abab4976166b1c0459c138fef933abf3fa0f7fd52e778ff8fc67c2c9503804ee03bc02bdf2109ee25ca60e4fa5395938748dfa00b8f60abaca3fb51ed47f566e44dbc20a380246f9fbcc32612d239b159370a7d9c9b387deaa0c0cf18bfd3e5de510df7214c19b2f032eefadc9b0e64e106ad147a41d76aad0e27310771b86f3b3d50853dde91128c8b9429f380f8703ce624c616633fbfb8991618c5607f8fcd12e22909c3d3fccbad26981635decf1a64d1c6fad1cffc3c697b442ddd051a72a0b0065302bbcdedb9f43ab49371d22cfaa3c7fd323c7266c08049c60eed17690b5c833f99acd48071b16d69e8d5d" + } + ] +} diff --git a/tough/tests/data/dubious-role-names/targets/1.txt b/tough/tests/data/dubious-role-names/targets/1.txt new file mode 100644 index 00000000..d00491fd --- /dev/null +++ b/tough/tests/data/dubious-role-names/targets/1.txt @@ -0,0 +1 @@ +1 diff --git a/tough/tests/data/dubious-role-names/targets/2.txt b/tough/tests/data/dubious-role-names/targets/2.txt new file mode 100644 index 00000000..0cfbf088 --- /dev/null +++ b/tough/tests/data/dubious-role-names/targets/2.txt @@ -0,0 +1 @@ +2 diff --git 
a/tough/tests/data/safe-target-paths/metadata/1.root.json b/tough/tests/data/safe-target-paths/metadata/1.root.json new file mode 100644 index 00000000..4e3386fd --- /dev/null +++ b/tough/tests/data/safe-target-paths/metadata/1.root.json @@ -0,0 +1,50 @@ +{ + "signed": { + "_type": "root", + "spec_version": "1.0.0", + "consistent_snapshot": false, + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "keys": { + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAnL6u6Q9Q6pg1G5020a83\nGlH/aFUO0PQ5leIpwWL8kWgpaWuUG7oRlOUG2/4cwN5FCvJJGXqU5AtSKq2fZ42J\n5XR9QMip4Pg0Q6mE8XCvAXAoMnkWSchdzgT2GoEntaOeRRTCUGb/DsVoxsVXjV6m\nFaRMx7nh8ggshMWgTYgTUDK+CSIBCcBWapCFq1BrM60XZmGTqeAuHSHaUUuF9G3b\ngOflH5L9IpQkaHWbJtGvyKLr53mhWO2r8BPR3+CtNZojAnkwmu4lA94k8C7TLMdc\nutzU4OzODe9UPERc33lRv8DBgsH3F077ZQwv/ikZXWSlACTDWZwenncCEwqdeDd4\n+q2AHyqxRN7bUAh57mUN+kFd3SS/4T44sfBrJw6N4JV/mE+/YfRLWtpIKIsXnBCb\nrC+dt96Vqz6g6eVVvqPwhOCSKcYsmp/iS6qwVn0Dq2SCrGG1FTmBjeA9ZkcjZhUG\nQEMyMNhoS+U2Nx5oIEIq2kREpuu+KsBSTUaOgR07WNUxAgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": { + "snapshot": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + }, + "root": { + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "9c835856f47dca75c63833581bd688ba397a2cade3df3a9356ee185e87db2831da21ac0fcd7f4530fb7cc8fb607f0a68be365e286cbcb04eb58acd0489e090531606b93b6397a0f91a59a58fd7089cb70e80f91f48819237cc464ad40914590c3ebcdcd9e9f57de917c014387eee1db06b7e3b2b1320eb1470ecafa347c3eeb213d7ca3586c492c3789e0e9a9b343838f4acc8141837b72af5f594bb5c845161e2ce8011ee561e6e467fbad6f2b53ebb2118b84132e58b6b777cf7f1674f8d403ab51616189c2e11c705c43abba88de7129d810a7c4d996d4ba995ac035a16f59ff958360c45608078e408796a815d65ee5906523074d5cfdda59a4ede34dce35336bd72a5a970c4a68436f3ae0c6f0685b519e7f66ac1970aa337efeb64db05f223c9ab02dda58cf85853c69932cac693a86358ec9ee93000aa0591e224f83d5b581dd6bbca0e7b639b0b773a688f9776aca91fc8434444aeb4717f8a8ab7901be048b1f1997720dc0b0e34ea4a8fb5317b14709cca7ab1683a6722785b94a3" + } + ] +} diff --git a/tough/tests/data/safe-target-paths/metadata/delegated.json b/tough/tests/data/safe-target-paths/metadata/delegated.json new file mode 100644 index 00000000..13f6c8b4 --- /dev/null +++ b/tough/tests/data/safe-target-paths/metadata/delegated.json @@ -0,0 +1,26 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "targets": { + "../delegated/foo/../subdir/data3.txt": { + "length": 14, + "hashes": { + "sha256": "ca4fb8d92326eccc3bf37bb1b5cb2c57bb558c884db3180e6e3ab65631ed9cd2" + } + } + }, + "delegations": { + "keys": {}, + "roles": [] + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": 
"2adaaef0b3e49a7bb88573e9fc6888543938c2e6784a6c464f59ba69804300da3a01fec06ddad9539f34722895bd799539c0e7645651abff6c1e8f7e4e4509f271989e92646b62ab14d39c7c2afba1dea350a9f019248b6972fca03992a1313f8a2b760c8c5c91a0216f5215a06123bfa39a195b6d3eb066c8796b68169679fbc21079f842c6dbf985eb3b1b54612f3c98a9300c291a7eec2dd16373fbabdedf6bf091a9edab5d9d36591a200aa1437f8cf238268798562d59edb6db1b1c75460a9f2a402d55e3e5d45286d51f6362b2daefa95cf7a3f99cec3fb086543cd15ab2ecd63469bb5bddaef50f9140b0af84fe37ed9fc30d6f019e94d0068006780258d9e8a643e17d34fe8f20ad19c7365f2b00c640c9c884a617d0b06c54203ca659799f33c0f4786e1dc0e8b6ab3650242e1d48ea5a0dae49ca0fda06b0ed597e2cdf9c141901d67997984e8816f744f54baaf29c2ff96e7afde644f364c9d175496f45fdb9a8584cd8289cf88337d4f7d968ecf9313725f362a7e05cd9873f55" + } + ] +} diff --git a/tough/tests/data/safe-target-paths/metadata/snapshot.json b/tough/tests/data/safe-target-paths/metadata/snapshot.json new file mode 100644 index 00000000..51ed745e --- /dev/null +++ b/tough/tests/data/safe-target-paths/metadata/snapshot.json @@ -0,0 +1,30 @@ +{ + "signed": { + "_type": "snapshot", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "meta": { + "delegated.json": { + "length": 1316, + "hashes": { + "sha256": "b6eba7d8126d2082cce72d23b150694b2ccf57ccb31bae18c3af44d152fbb46e" + }, + "version": 1 + }, + "targets.json": { + "length": 2640, + "hashes": { + "sha256": "9db65e955c42616481ff9439ed984a6da4b6cf0d7c19a2b176d7eaa7a5b38243" + }, + "version": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "9259ce071cd1bfc1c3823636632c61d1df94d96d2d69bdea83343b82ba25ff44cabdd64dfb072b47ba8f8acc3c7ec81f44794febab9b1b20158a8298d7b7e426fe47ef7efa38d9112c3433a597545d76b80cc409a45098cbf25f2fade0bc685fe5ceace937a9bd6e44a63be0c462e88ffae7ae083862d14aca60861d85c461bece6d80d440cc37fc1b99f213f576e0b7ac18d2aae76456f89608e4a16aa004ff8b30e6a48119471507af29136a27c652a5df645edb5c5934eca5937a4334992c65e5ec2139740332c47632825af24ebac9495f4d30273775832942ed1d3a220025c7907d8d237bf1bdda3a4eb37a7fcef59d464deb352d179d593d8a49d9d69f022d7feaf80b68aa1b62ce0d5cb4cc633cc6f7c9671742d1b439bc5e6bf0adecadb4109a29f37a582a12ac7334032571796e78364c0bdec873592f3bb20110498886b0dd24190c556920bd063789dd6f12f11481422d3a6ae03a53b7cf7ba0dd679135c087b5f4912e8356eaebfe930420f254c31289b5521f74f1d120f2e4cb" + } + ] +} diff --git a/tough/tests/data/safe-target-paths/metadata/targets.json b/tough/tests/data/safe-target-paths/metadata/targets.json new file mode 100644 index 00000000..9a813a55 --- /dev/null +++ b/tough/tests/data/safe-target-paths/metadata/targets.json @@ -0,0 +1,52 @@ +{ + "signed": { + "_type": "targets", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "targets": { + "foo/../bar/../baz/../data1.txt": { + "length": 14, + "hashes": { + "sha256": "5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1" + } + }, + "foo/bar/baz/../data2.txt": { + "length": 14, + "hashes": { + "sha256": "732b0c04a45c1296a7adf26814d2622c288e5ae1ce0cd791da84aea5a745081c" + } + } + }, + "delegations": { + "keys": { + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAnL6u6Q9Q6pg1G5020a83\nGlH/aFUO0PQ5leIpwWL8kWgpaWuUG7oRlOUG2/4cwN5FCvJJGXqU5AtSKq2fZ42J\n5XR9QMip4Pg0Q6mE8XCvAXAoMnkWSchdzgT2GoEntaOeRRTCUGb/DsVoxsVXjV6m\nFaRMx7nh8ggshMWgTYgTUDK+CSIBCcBWapCFq1BrM60XZmGTqeAuHSHaUUuF9G3b\ngOflH5L9IpQkaHWbJtGvyKLr53mhWO2r8BPR3+CtNZojAnkwmu4lA94k8C7TLMdc\nutzU4OzODe9UPERc33lRv8DBgsH3F077ZQwv/ikZXWSlACTDWZwenncCEwqdeDd4\n+q2AHyqxRN7bUAh57mUN+kFd3SS/4T44sfBrJw6N4JV/mE+/YfRLWtpIKIsXnBCb\nrC+dt96Vqz6g6eVVvqPwhOCSKcYsmp/iS6qwVn0Dq2SCrGG1FTmBjeA9ZkcjZhUG\nQEMyMNhoS+U2Nx5oIEIq2kREpuu+KsBSTUaOgR07WNUxAgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + } + }, + "roles": [ + { + "name": "delegated", + "keyids": [ + "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507" + ], + "threshold": 1, + "paths": [ + "delegated/*" + ], + "terminating": false + } + ] + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "037a918944ce4f8659a8a3f508140960488232b9414290741bec6afbb00ce9ebd2e3bf302365ee9a8271ad2884c19e460485bc7ab4a2a13997b4dc0185feabd379df998b217ab0b3a5422c929b7dbe98b227dddb38f4601cf805d7f7594df603d762a8d40cf1b940e13e7b40ca79448d2b629b555a300e1af61550eaff93a6f2d21d15db4ac782d5cd1e2423a8d8b716173923b683e08345ab1d924407fb4b843c9ab86925707aae92de4339d371861baf370202233b8a1a96185ecb219fb1500b6087d2744d20eaa26301c1bc2ae175107c9f77c5b786dd14c3efea5724c039abda455d71cd57d59d50a925b0107776f8480ba3b56b6543f707e7b56942748458165f0d6ecb4fe525812252dc13618b740f7eecc478f94c660fb05d1cbafa2740367637496d9ab9f59efda9abf3cb2de4d4b41e44fd59b4fe557b6ca40c6825f10740f3a51cf358ec6f7423c136541cd316a071c70a3eab0416143fea04e032d53b5e0f72f695b9048ff7042406b4890caa8b80e64110cd2bc6bbcf896b3f57" + } + ] +} diff --git a/tough/tests/data/safe-target-paths/metadata/timestamp.json b/tough/tests/data/safe-target-paths/metadata/timestamp.json new file mode 100644 index 00000000..08310550 --- /dev/null +++ b/tough/tests/data/safe-target-paths/metadata/timestamp.json @@ -0,0 +1,23 @@ +{ + "signed": { + "_type": "timestamp", + "spec_version": "1.0.0", + "version": 1, + "expires": "2999-01-01T00:00:00Z", + "meta": { + "snapshot.json": { + "length": 1448, + "hashes": { + "sha256": "e6207dd2cafe95a9f3380b752a453749df016f88b431f79f4f07425b60f8ab4c" + }, + "version": 1 + } + } + }, + "signatures": [ + { + "keyid": "69f069cf595e3f09cbe99a9f0f82127f7c7d2fde859e30fd5c0f2b4fc9c4a507", + "sig": "1368df2b0bdd0241cb4b2c45f5106ae6aed4e6c6a47bd1eaa8eadcd0f600fd635a171f56904436e84062892b8afc804a3071afaea6f4c1ac30e455e60f829c2907523686093707561c49ca4a1d1b0bfeb875bfaa6d83af57f74c8d15429e43f34b77e2f09bc5f1eddc655d33e271adf88fa15aa5de5aa2d60de2d387e07c8c8a94484f4fdeda4032e504648cd0a3fcdf51f1fecb0412d85e2575ff0f3dd9aaa0f2b020acad2182a8622307328d9cc7e29b6cf9507d3155916f459d516e7df2e89a7f39aab78a911216ceab189271dcc4191ef06e48b7ec85cc30fc2cc2590b734ac03527765f03deafeb7f718f6a6cc23675f759bab06840d4ae40ddce8968c7b9d681e4c62b49552b136e10ea79d7e966a605404b295ac4fd604608d817c6c68b9fabe0c7828a2cab812f2c1af3a8b21b11055871dc8e605a502c52d51441846009c7979cd14885723839b02a4f785baa7259b80cf0ed2d0bbdfcfd7081a21001d6cc2cd974e3ec92ab9d738a7570303e0b814ef77f23f4180cb92dd6881b0b" + } + ] +} diff --git a/tough/tests/data/safe-target-paths/targets/data1.txt b/tough/tests/data/safe-target-paths/targets/data1.txt new file mode 100644 index 00000000..725c930e --- /dev/null +++ b/tough/tests/data/safe-target-paths/targets/data1.txt @@ -0,0 +1,4 @@ +123 +456 +789 +0 diff --git 
a/tough/tests/data/safe-target-paths/targets/delegated/subdir/data3.txt b/tough/tests/data/safe-target-paths/targets/delegated/subdir/data3.txt new file mode 100644 index 00000000..f33ddd8e --- /dev/null +++ b/tough/tests/data/safe-target-paths/targets/delegated/subdir/data3.txt @@ -0,0 +1,4 @@ +!@# +$%^ +&*( +) diff --git a/tough/tests/data/safe-target-paths/targets/foo/bar/data2.txt b/tough/tests/data/safe-target-paths/targets/foo/bar/data2.txt new file mode 100644 index 00000000..5fbdc068 --- /dev/null +++ b/tough/tests/data/safe-target-paths/targets/foo/bar/data2.txt @@ -0,0 +1,4 @@ +abc +def +hij +k diff --git a/tough/tests/http.rs b/tough/tests/http.rs index 9cf9425c..a4155224 100644 --- a/tough/tests/http.rs +++ b/tough/tests/http.rs @@ -7,7 +7,7 @@ mod http_happy { use httptest::{matchers::*, responders::*, Expectation, Server}; use std::fs::File; use std::str::FromStr; - use tough::{DefaultTransport, HttpTransport, RepositoryLoader, Transport}; + use tough::{DefaultTransport, HttpTransport, RepositoryLoader, TargetName, Transport}; use url::Url; /// Set an expectation in a test HTTP server which serves a file from `tuf-reference-impl`. @@ -67,19 +67,21 @@ mod http_happy { .load() .unwrap(); + let file1 = TargetName::new("file1.txt").unwrap(); assert_eq!( - read_to_end(repo.read_target("file1.txt").unwrap().unwrap()), + read_to_end(repo.read_target(&file1).unwrap().unwrap()), &b"This is an example target file."[..] ); + let file2 = TargetName::new("file2.txt").unwrap(); assert_eq!( - read_to_end(repo.read_target("file2.txt").unwrap().unwrap()), + read_to_end(repo.read_target(&file2).unwrap().unwrap()), &b"This is an another example target file."[..] ); assert_eq!( repo.targets() .signed .targets - .get("file1.txt") + .get(&file1) .unwrap() .custom .get("file_permissions") diff --git a/tough/tests/interop.rs b/tough/tests/interop.rs index 08258e87..bd6e0e8f 100644 --- a/tough/tests/interop.rs +++ b/tough/tests/interop.rs @@ -4,7 +4,7 @@ use std::fs::File; use tempfile::TempDir; use test_utils::{dir_url, read_to_end, test_data}; -use tough::{FilesystemTransport, Limits, Repository, RepositoryLoader}; +use tough::{FilesystemTransport, Limits, Repository, RepositoryLoader, TargetName}; mod test_utils; @@ -27,19 +27,22 @@ fn test_tuf_reference_impl() { } fn assert_tuf_reference_impl(repo: &Repository) { + let file1 = TargetName::new("file1.txt").unwrap(); + let file2 = TargetName::new("file2.txt").unwrap(); + let file3 = TargetName::new("file3.txt").unwrap(); assert_eq!( - read_to_end(repo.read_target("file1.txt").unwrap().unwrap()), + read_to_end(repo.read_target(&file1).unwrap().unwrap()), &b"This is an example target file."[..] ); assert_eq!( - read_to_end(repo.read_target("file2.txt").unwrap().unwrap()), + read_to_end(repo.read_target(&file2).unwrap().unwrap()), &b"This is an another example target file."[..] ); assert_eq!( repo.targets() .signed .targets - .get("file1.txt") + .get(&file1) .unwrap() .custom .get("file_permissions") @@ -53,7 +56,7 @@ fn assert_tuf_reference_impl(repo: &Repository) { .delegations .as_ref() .unwrap() - .target_is_delegated(&"file3.txt".to_string())); + .target_is_delegated(&file3)); } /// Test that `tough` can process repositories generated by [`tuf`], the reference Python @@ -80,3 +83,32 @@ fn test_tuf_reference_impl_default_transport() { .unwrap(); assert_tuf_reference_impl(&repo); } + +/// Test that `tough` can load a repository that has some unusual delegate role names. 
This ensures +/// that percent-encoded role names are handled correctly and that path traversal characters in a +/// role name do not cause `tough` to write outside of its datastore. +#[test] +fn test_dubious_role_name() { + let base = test_data().join("dubious-role-names"); + let datastore = TempDir::new().unwrap(); + + let repo = RepositoryLoader::new( + File::open(base.join("metadata").join("1.root.json")).unwrap(), + dir_url(base.join("metadata")), + dir_url(base.join("targets")), + ) + .datastore(datastore.path()) + .load() + .unwrap(); + + // Prove that the role name has path traversal characters. + let expected_rolename = "../../path/like/dubious"; + assert_eq!( + repo.delegated_role(expected_rolename).unwrap().name, + expected_rolename + ); + + // Prove that the role's metadata file has not been written outside of the datastore. + let expected_filename = "..%2F..%2Fpath%2Flike%2Fdubious.json"; + assert!(datastore.path().join(expected_filename).is_file()) +}
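Review note: the `expected_filename` above is the role name percent-encoded for use as a datastore filename. A sketch of the idea with the `percent-encoding` crate; the exact `AsciiSet` tough uses is an assumption here, but encoding at least `/` and `%` yields the filename the test expects:

```rust
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

// Encoding `/` (and `%` itself) means a role name can never traverse
// directories when it is used as a filename.
const FILENAME_UNSAFE: &AsciiSet = &CONTROLS.add(b'/').add(b'%');

fn main() {
    let encoded = utf8_percent_encode("../../path/like/dubious", FILENAME_UNSAFE).to_string();
    assert_eq!(encoded, "..%2F..%2Fpath%2Flike%2Fdubious");
}
```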
diff --git a/tough/tests/repo_cache.rs b/tough/tests/repo_cache.rs index ef6e5a80..3238313c 100644 --- a/tough/tests/repo_cache.rs +++ b/tough/tests/repo_cache.rs @@ -5,8 +5,8 @@ use std::fs::File; use std::io::Read; use std::path::PathBuf; use tempfile::TempDir; -use test_utils::{dir_url, test_data}; -use tough::{Repository, RepositoryLoader}; +use test_utils::{dir_url, read_to_end, test_data, DATA_1, DATA_2}; +use tough::{Repository, RepositoryLoader, TargetName}; use url::Url; mod test_utils; @@ -72,8 +72,9 @@ fn test_repo_cache_all_targets() { // the copied repo should have file1 and file2 (i.e. all of targets). let mut file_data = Vec::new(); + let file1 = TargetName::new("file1.txt").unwrap(); let file_size = copied_repo - .read_target("file1.txt") + .read_target(&file1) .unwrap() .unwrap() .read_to_end(&mut file_data) @@ -81,8 +82,9 @@ assert_eq!(31, file_size); let mut file_data = Vec::new(); + let file2 = TargetName::new("file2.txt").unwrap(); let file_size = copied_repo - .read_target("file2.txt") + .read_target(&file2) .unwrap() .unwrap() .read_to_end(&mut file_data) @@ -121,8 +123,9 @@ fn test_repo_cache_list_of_two_targets() { // the copied repo should have file1 and file2 (i.e. all of the listed targets). let mut file_data = Vec::new(); + let file1 = TargetName::new("file1.txt").unwrap(); let file_size = copied_repo - .read_target("file1.txt") + .read_target(&file1) .unwrap() .unwrap() .read_to_end(&mut file_data) @@ -130,8 +133,9 @@ assert_eq!(31, file_size); let mut file_data = Vec::new(); + let file2 = TargetName::new("file2.txt").unwrap(); let file_size = copied_repo - .read_target("file2.txt") + .read_target(&file2) .unwrap() .unwrap() .read_to_end(&mut file_data) @@ -169,12 +173,14 @@ fn test_repo_cache_some() { .unwrap(); // the copied repo should have file2 but not file1 (i.e. only the listed targets). - let read_target_result = copied_repo.read_target("file1.txt"); + let file1 = TargetName::new("file1.txt").unwrap(); + let read_target_result = copied_repo.read_target(&file1); assert!(read_target_result.is_err()); let mut file_data = Vec::new(); + let file2 = TargetName::new("file2.txt").unwrap(); let file_size = copied_repo - .read_target("file2.txt") + .read_target(&file2) .unwrap() .unwrap() .read_to_end(&mut file_data) @@ -231,3 +237,65 @@ fn test_repo_cache_metadata_no_root_chain() { // Verify we did not cache the root.json assert!(!metadata_destination.join("1.root.json").exists()); } + +/// Test that the repo.cache() function prefixes target filenames with their sha digest. +#[test] +fn test_repo_cache_consistent_snapshots() { + let repo_name = "consistent-snapshots"; + let metadata_dir = test_data().join(repo_name).join("metadata"); + let targets_dir = test_data().join(repo_name).join("targets"); + let root = metadata_dir.join("1.root.json"); + let repo = RepositoryLoader::new( + File::open(&root).unwrap(), + dir_url(metadata_dir), + dir_url(targets_dir), + ) + .load() + .unwrap(); + + // cache the repo for future use + let destination = TempDir::new().unwrap(); + let metadata_destination = destination.as_ref().join("metadata"); + let targets_destination = destination.as_ref().join("targets"); + repo.cache( + &metadata_destination, + &targets_destination, + Option::<&[&str]>::None, + true, + ) + .unwrap(); + + // check that we can load the copied repo. + let copied_repo = RepositoryLoader::new( + File::open(&root).unwrap(), + dir_url(&metadata_destination), + dir_url(&targets_destination), + ) + .load() + .unwrap(); + + // the copied repo should contain all of the targets (data1.txt and data2.txt).
+ let data1 = String::from_utf8(read_to_end( + copied_repo + .read_target(&TargetName::new("data1.txt").unwrap()) + .unwrap() + .unwrap(), + )) + .unwrap(); + assert_eq!(data1, DATA_1); + + let data2 = String::from_utf8(read_to_end( + copied_repo + .read_target(&TargetName::new("data2.txt").unwrap()) + .unwrap() + .unwrap(), + )) + .unwrap(); + assert_eq!(data2, DATA_2); + + // assert that the target has its digest prepended + let expected_filepath = targets_destination + .join("5aa1d2b3bea034a0f9d0b27a1bc72919b3145a2b092b72ac0415a05e07e2bdd1.data1.txt"); + assert!(expected_filepath.is_file()) +} diff --git a/tough/tests/repo_editor.rs b/tough/tests/repo_editor.rs index 8bb9bc19..6053ab30 100644 --- a/tough/tests/repo_editor.rs +++ b/tough/tests/repo_editor.rs @@ -16,8 +16,8 @@ use tough::key_source::LocalKeySource; use tough::schema::decoded::Decoded; use tough::schema::decoded::Hex; use tough::schema::key::Key; -use tough::schema::PathSet; -use tough::{Repository, RepositoryLoader}; +use tough::schema::{PathPattern, PathSet}; +use tough::{Repository, RepositoryLoader, TargetName}; use url::Url; mod test_utils; @@ -165,7 +165,7 @@ fn create_sign_write_reload_repo() { .delegate_role( "role1", role1_key, - PathSet::Paths(["file?.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file?.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Utc::now().checked_add_signed(Duration::days(21)).unwrap(), NonZeroU64::new(1).unwrap(), @@ -182,7 +182,7 @@ fn create_sign_write_reload_repo() { .delegate_role( "role2", role2_key, - PathSet::Paths(["file1.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file1.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Utc::now().checked_add_signed(Duration::days(21)).unwrap(), NonZeroU64::new(1).unwrap(), @@ -191,7 +191,7 @@ fn create_sign_write_reload_repo() { .delegate_role( "role3", role1_key, - PathSet::Paths(["file1.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file1.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Utc::now().checked_add_signed(Duration::days(21)).unwrap(), NonZeroU64::new(1).unwrap(), @@ -209,7 +209,7 @@ fn create_sign_write_reload_repo() { .delegate_role( "role4", role2_key, - PathSet::Paths(["file1.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file1.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Utc::now().checked_add_signed(Duration::days(21)).unwrap(), NonZeroU64::new(1).unwrap(), @@ -292,7 +292,7 @@ fn create_role_flow() { .add_role( "A", metadata_base_url_out.as_str(), - PathSet::Paths(["*.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("*.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Some(key_hash_map(role1_key)), ) @@ -369,7 +369,7 @@ fn create_role_flow() { .add_role( "B", metadata_base_url_out.as_str(), - PathSet::Paths(["file?.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file?.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Some(key_hash_map(role2_key)), ) @@ -492,7 +492,7 @@ fn update_targets_flow() { .add_role( "A", metadata_base_url_out.as_str(), - PathSet::Paths(["*.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("*.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Some(key_hash_map(role1_key)), ) @@ -569,7 +569,7 @@ fn update_targets_flow() { .add_role( "B", metadata_base_url_out.as_str(), - PathSet::Paths(["file?.txt".to_string()].to_vec()), + PathSet::Paths(vec![PathPattern::new("file?.txt").unwrap()]), NonZeroU64::new(1).unwrap(), Some(key_hash_map(role2_key)), ) @@ -713,8 +713,9 @@ fn 
update_targets_flow() { .load() .unwrap(); + let file1 = TargetName::new("file1.txt").unwrap(); assert_eq!( - read_to_end(new_repo.read_target("file1.txt").unwrap().unwrap()), + read_to_end(new_repo.read_target(&file1).unwrap().unwrap()), &b"This is an example target file."[..] ); @@ -799,8 +800,9 @@ fn update_targets_flow() { .load() .unwrap(); + let file1 = TargetName::new("file1.txt").unwrap(); assert_eq!( - read_to_end(new_repo.read_target("file1.txt").unwrap().unwrap()), + read_to_end(new_repo.read_target(&file1).unwrap().unwrap()), &b"Updated file1.txt"[..] ); }
diff --git a/tough/tests/target_path_safety.rs b/tough/tests/target_path_safety.rs new file mode 100644 index 00000000..4a05dd27 --- /dev/null +++ b/tough/tests/target_path_safety.rs @@ -0,0 +1,177 @@ +mod test_utils; + +use chrono::{DateTime, TimeZone, Utc}; +use maplit::hashmap; +use ring::rand::SystemRandom; +use std::collections::HashMap; +use std::fs::{self, create_dir_all, File}; +use std::num::NonZeroU64; +use std::path::Path; +use tempfile::TempDir; +use test_utils::{dir_url, test_data, DATA_1, DATA_2, DATA_3}; +use tough::editor::signed::SignedRole; +use tough::editor::RepositoryEditor; +use tough::key_source::{KeySource, LocalKeySource}; +use tough::schema::{KeyHolder, PathPattern, PathSet, RoleKeys, RoleType, Root, Signed, Target}; +use tough::{Prefix, RepositoryLoader, TargetName}; + +/// Returns a date in the future when Rust programs will no longer exist. `MAX_DATETIME` is so huge +/// that it serializes to something weird-looking, so we use something that is recognizable to +/// humans as a date. +fn later() -> DateTime<Utc> { + Utc.ymd(2999, 1, 1).and_hms(0, 0, 0) +} + +/// This test ensures that we can safely handle path-like target names with ../'s in them. +fn create_root(root_path: &Path, consistent_snapshot: bool) -> Vec<Box<dyn KeySource>> { + let keys: Vec<Box<dyn KeySource>> = vec![Box::new(LocalKeySource { + path: test_data().join("snakeoil.pem"), + })]; + + let key_pair = keys.iter().next().unwrap().as_sign().unwrap().tuf_key(); + let key_id = key_pair.key_id().unwrap(); + + let empty_keys = RoleKeys { + keyids: vec![key_id.clone()], + threshold: NonZeroU64::new(1).unwrap(), + _extra: Default::default(), + }; + + let mut root = Signed { + signed: Root { + spec_version: "1.0.0".into(), + consistent_snapshot, + version: NonZeroU64::new(1).unwrap(), + expires: later(), + keys: HashMap::new(), + roles: hashmap!
{ + RoleType::Root => empty_keys.clone(), + RoleType::Snapshot => empty_keys.clone(), + RoleType::Targets => empty_keys.clone(), + RoleType::Timestamp => empty_keys.clone(), + // RoleType::DelegatedTargets => empty_keys.clone(), + }, + _extra: HashMap::new(), + }, + signatures: Vec::new(), + }; + + root.signed.keys.insert(key_id.clone(), key_pair.clone()); + + let signed_root = SignedRole::new( + root.signed.clone(), + &KeyHolder::Root(root.signed.clone()), + &keys, + &SystemRandom::new(), + ) + .unwrap(); + + std::fs::write(&root_path, signed_root.buffer()).unwrap(); + + keys +} + +#[test] +fn safe_target_paths() { + let tempdir = TempDir::new().unwrap(); + let root_path = tempdir.path().join("root.json"); + let keys = create_root(&root_path, false); + let one = NonZeroU64::new(1).unwrap(); + + let mut editor = RepositoryEditor::new(&root_path).unwrap(); + editor + .snapshot_version(one) + .snapshot_expires(later()) + .timestamp_version(one) + .timestamp_expires(later()) + .delegate_role( + "delegated", + &keys, + PathSet::Paths(vec![PathPattern::new("delegated/*").unwrap()]), + one, + later(), + one, + ) + .unwrap(); + let repo_dir = tempdir.path().join("repo"); + let targets_dir = repo_dir.join("targets"); + fs::create_dir_all(targets_dir.join("foo/bar")).unwrap(); + fs::create_dir_all(targets_dir.join("delegated/subdir")).unwrap(); + let targets_file_1 = targets_dir.join("data1.txt"); + let targets_file_2 = targets_dir.join("foo/bar/data2.txt"); + let targets_file_3 = targets_dir.join("delegated/subdir/data3.txt"); + fs::write(&targets_file_1, DATA_1).unwrap(); + fs::write(&targets_file_2, DATA_2).unwrap(); + fs::write(&targets_file_3, DATA_3).unwrap(); + + let target_name_1 = TargetName::new("foo/../bar/../baz/../../../../data1.txt").unwrap(); + let target_1 = Target::from_path(&targets_file_1).unwrap(); + let target_name_2 = TargetName::new("foo/bar/baz/../data2.txt").unwrap(); + let target_2 = Target::from_path(&targets_file_2).unwrap(); + let target_name_3 = TargetName::new("../delegated/foo/../subdir/data3.txt").unwrap(); + let target_3 = Target::from_path(&targets_file_3).unwrap(); + + editor.add_target(target_name_1.clone(), target_1).unwrap(); + editor.add_target(target_name_2.clone(), target_2).unwrap(); + editor + .targets_version(one) + .unwrap() + .targets_expires(later()) + .unwrap() + .sign_targets_editor(&keys) + .unwrap() + .change_delegated_targets("delegated") + .unwrap() + .add_target(target_name_3.clone(), target_3) + .unwrap() + .targets_version(one) + .unwrap() + .targets_expires(later()) + .unwrap() + .sign_targets_editor(&keys) + .unwrap(); + + let signed_repo = editor.sign(&keys).unwrap(); + let metadata_dir = repo_dir.join("metadata"); + signed_repo.write(&metadata_dir).unwrap(); + + let loaded_repo = RepositoryLoader::new( + File::open(&root_path).unwrap(), + dir_url(&metadata_dir), + dir_url(&targets_dir), + ) + .load() + .unwrap(); + + let outdir = tempdir.path().join("outdir"); + create_dir_all(&outdir).unwrap(); + loaded_repo + .save_target(&target_name_1, &outdir, Prefix::None) + .unwrap(); + loaded_repo + .save_target(&target_name_2, &outdir, Prefix::None) + .unwrap(); + loaded_repo + .save_target(&target_name_3, &outdir, Prefix::None) + .unwrap(); + + // These might be created if we didn't safely clean the target names as paths. 
+ assert!(!outdir.join("bar").exists()); + assert!(!outdir.join("baz").exists()); + assert!(!outdir.join("foo/bar/baz").exists()); + assert!(!outdir.join("../delegated/foo/../subdir/data3.txt").exists()); + + // The targets should end up at these paths. + assert_eq!( + fs::read_to_string(outdir.join("data1.txt")).unwrap(), + DATA_1 + ); + assert_eq!( + fs::read_to_string(outdir.join("foo/bar/data2.txt")).unwrap(), + DATA_2 + ); + assert_eq!( + fs::read_to_string(outdir.join("delegated/subdir/data3.txt")).unwrap(), + DATA_3 + ); +}
diff --git a/tough/tests/test_utils.rs b/tough/tests/test_utils.rs index a08488de..268d5f03 100644 --- a/tough/tests/test_utils.rs +++ b/tough/tests/test_utils.rs @@ -1,14 +1,21 @@ // Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: MIT OR Apache-2.0 +// An integration test might want to use some, but not all of, the symbols herein. To do so would +// cause compiler warnings for unused code, so we suppress them. +#![allow(unused)] + use std::io::Read; use std::path::{Path, PathBuf}; use url::Url; /// Utilities for tests. Not every test module uses every function, so we suppress unused warnings. +pub const DATA_1: &str = "123\n456\n789\n0\n"; +pub const DATA_2: &str = "abc\ndef\nhij\nk\n"; +pub const DATA_3: &str = "!@#\n$%^\n&*(\n)\n"; + /// Returns the path to our test data directory -#[allow(unused)] pub fn test_data() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("tests") @@ -16,13 +23,11 @@ } /// Converts a filepath into a URI formatted string -#[allow(unused)] pub fn dir_url<P: AsRef<Path>>(path: P) -> Url { Url::from_directory_path(path).unwrap() } /// Gets the goods from a read and makes a Vec -#[allow(unused)] pub fn read_to_end<R: Read>(mut reader: R) -> Vec<u8> { let mut v = Vec::new(); reader.read_to_end(&mut v).unwrap();
diff --git a/tuftool/CHANGELOG.md b/tuftool/CHANGELOG.md index aed1c7e5..bc434cf2 100644 --- a/tuftool/CHANGELOG.md +++ b/tuftool/CHANGELOG.md @@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.7.0] - 2021-10-19 +### Breaking Changes +- `tuftool download` now requires `outdir` to not exist. + +### Changes +- Fix an issue where delegated role names with path traversal constructs could cause files to be written in unexpected locations. +- Fix a similar issue with path traversal constructs in target names. +- Update dependencies. + ## [0.6.4] - 2021-09-15 ### Changes - Add ignore threshold flag. [#412] @@ -145,7 +154,8 @@ Major update: much of the logic in `tuftool` has been factored out and added to ### Added - Everything! -[Unreleased]: https://github.com/awslabs/tough/compare/tuftool-v0.6.4...develop +[Unreleased]: https://github.com/awslabs/tough/compare/tuftool-v0.7.0...develop +[0.7.0]: https://github.com/awslabs/tough/compare/tuftool-v0.6.4...tuftool-v0.7.0 [0.6.4]: https://github.com/awslabs/tough/compare/tuftool-v0.6.3...tuftool-v0.6.4 [0.6.3]: https://github.com/awslabs/tough/compare/tuftool-v0.6.2...tuftool-v0.6.3 [0.6.2]: https://github.com/awslabs/tough/compare/tuftool-v0.6.1...tuftool-v0.6.2
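Review note: the tuftool changes below adopt the typed `PathPattern` from tough 0.12.0 in place of raw `String` globs for delegation paths. A small sketch (the pattern string is arbitrary):

```rust
use tough::schema::{PathPattern, PathSet};

fn delegation_paths() -> PathSet {
    // `PathPattern::new` validates the glob at construction time, so an
    // unusable pattern fails here rather than deep inside repo editing.
    PathSet::Paths(vec![PathPattern::new("targets/*.txt").unwrap()])
}
```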
diff --git a/tuftool/Cargo.toml b/tuftool/Cargo.toml index c60e672d..0bcf265e 100644 --- a/tuftool/Cargo.toml +++ b/tuftool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tuftool" -version = "0.6.4" +version = "0.7.0" description = "Utility for creating and signing The Update Framework (TUF) repositories" authors = ["iliana destroyer of worlds "] license = "MIT OR Apache-2.0" @@ -36,9 +36,9 @@ snafu = { version = "0.6.10", features = ["backtraces-impl-backtrace-crate"] } structopt = "0.3" tempfile = "3.1.0" tokio = "~1.8" # LTS -tough = { version = "0.11.3", path = "../tough", features = ["http"] } -tough-ssm = { version = "0.6.3", path = "../tough-ssm" } -tough-kms = { version = "0.3.3", path = "../tough-kms" } +tough = { version = "0.12.0", path = "../tough", features = ["http"] } +tough-ssm = { version = "0.6.4", path = "../tough-ssm" } +tough-kms = { version = "0.3.4", path = "../tough-kms" } url = "2.1.0" walkdir = "2.3.2"
diff --git a/tuftool/src/add_role.rs b/tuftool/src/add_role.rs index 24b78329..51e4f6df 100644 --- a/tuftool/src/add_role.rs +++ b/tuftool/src/add_role.rs @@ -12,7 +12,7 @@ use std::path::PathBuf; use structopt::StructOpt; use tough::editor::{targets::TargetsEditor, RepositoryEditor}; use tough::key_source::KeySource; -use tough::schema::PathSet; +use tough::schema::{PathHashPrefix, PathPattern, PathSet}; use url::Url; #[derive(Debug, StructOpt)] @@ -56,11 +56,11 @@ pub(crate) struct AddRoleArgs { /// The delegated paths #[structopt(short = "p", long = "paths", conflicts_with = "path-hash-prefixes")] - paths: Option<Vec<String>>, + paths: Option<Vec<PathPattern>>, /// The delegated paths hash prefixes #[structopt(short = "hp", long = "path-hash-prefixes")] - path_hash_prefixes: Option<Vec<String>>, + path_hash_prefixes: Option<Vec<PathHashPrefix>>, /// Determines if entire repo should be signed #[structopt(long = "sign-all")]
diff --git a/tuftool/src/create.rs b/tuftool/src/create.rs index f7c16729..62fb8ac1 100644 --- a/tuftool/src/create.rs +++ b/tuftool/src/create.rs @@ -100,9 +100,9 @@ impl CreateArgs { .timestamp_version(self.timestamp_version) .timestamp_expires(self.timestamp_expires); - for (filename, target) in targets { + for (target_name, target) in targets { editor - .add_target(&filename, target) + .add_target(target_name, target) .context(error::DelegationStructure)?; }
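Review note: in the download.rs hunk that follows, every raw name from the command line becomes a `TargetName` before anything touches the filesystem. In isolation, the collect-into-`Result` pattern it uses looks like this (a sketch; the real code also attaches snafu error context):

```rust
use tough::TargetName;

// Collecting an iterator of `Result<TargetName, _>` into
// `Result<Vec<TargetName>, _>` stops at the first invalid name, so one bad
// name fails the whole command before any target is written.
fn to_target_names(raw: &[String]) -> Result<Vec<TargetName>, tough::error::Error> {
    raw.iter().map(|s| TargetName::new(s.as_str())).collect()
}
```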
+    /// Output directory for targets (will be created and must not already exist)
     outdir: PathBuf,
 
     /// Allow repo download for expired metadata
@@ -58,6 +57,12 @@ WARNING: `--allow-expired-repo` was passed; this is unsafe and will not establis
 impl DownloadArgs {
     pub(crate) fn run(&self) -> Result<()> {
+        // To help ensure that downloads are safe, we require that the outdir does not exist.
+        ensure!(
+            !self.outdir.exists(),
+            error::DownloadOutdirExists { path: &self.outdir }
+        );
+
         // use local root.json or download from repository
         let root_path = if let Some(path) = &self.root {
             PathBuf::from(path)
@@ -90,21 +95,22 @@ impl DownloadArgs {
     }
 }
 
-fn handle_download(repository: &Repository, outdir: &Path, target_names: &[String]) -> Result<()> {
-    let download_target = |target: &str| -> Result<()> {
-        let path = PathBuf::from(outdir).join(target);
-        println!("\t-> {}", &target);
-        let mut reader = repository
-            .read_target(target)
-            .context(error::Metadata)?
-            .context(error::TargetNotFound { target })?;
-        let mut f = File::create(&path).context(error::OpenFile { path: &path })?;
-        io::copy(&mut reader, &mut f).context(error::WriteTarget)?;
+fn handle_download(repository: &Repository, outdir: &Path, raw_names: &[String]) -> Result<()> {
+    let target_names: Result<Vec<TargetName>> = raw_names
+        .iter()
+        .map(|s| TargetName::new(s).context(error::InvalidTargetName))
+        .collect();
+    let target_names = target_names?;
+    let download_target = |name: &TargetName| -> Result<()> {
+        println!("\t-> {}", name.raw());
+        repository
+            .save_target(name, outdir, Prefix::None)
+            .context(error::Metadata)?;
         Ok(())
     };
 
     // copy requested targets, or all available targets if not specified
-    let targets = if target_names.is_empty() {
+    let targets: Vec<TargetName> = if target_names.is_empty() {
         repository
             .targets()
             .signed
@@ -113,7 +119,7 @@ fn handle_download(repository: &Repository, outdir: &Path, target_names: &[Strin
             .cloned()
             .collect()
     } else {
-        target_names.to_owned()
+        target_names
     };
 
     println!("Downloading targets to {:?}", outdir);
diff --git a/tuftool/src/error.rs b/tuftool/src/error.rs
index 74daafdd..a8eb7026 100644
--- a/tuftool/src/error.rs
+++ b/tuftool/src/error.rs
@@ -79,6 +79,9 @@ pub(crate) enum Error {
         backtrace: Backtrace,
     },
 
+    #[snafu(display("A file or directory already exists at '{}'", path.display()))]
+    DownloadOutdirExists { path: PathBuf, backtrace: Backtrace },
+
     #[snafu(display(
         "Failed to create a Repository Editor with root.json '{}': {}",
         path.display(),
@@ -144,6 +147,9 @@ pub(crate) enum Error {
         backtrace: Backtrace,
     },
 
+    #[snafu(display("Invalid target name: {}", source))]
+    InvalidTargetName { source: tough::error::Error },
+
     #[snafu(display("Failed to serialize to JSON: {}", source))]
     JsonSerialization {
         source: tough::schema::Error,
diff --git a/tuftool/src/main.rs b/tuftool/src/main.rs
index 299227fe..199e60c9 100644
--- a/tuftool/src/main.rs
+++ b/tuftool/src/main.rs
@@ -39,6 +39,7 @@
 use std::path::Path;
 use structopt::StructOpt;
 use tempfile::NamedTempFile;
 use tough::schema::Target;
+use tough::TargetName;
 use walkdir::WalkDir;
 
 static SPEC_VERSION: &str = "1.0.0";
@@ -127,7 +128,7 @@
 
 // Walk the directory specified, building a map of filename to Target structs.
 // Hashing of the targets is done in parallel
-fn build_targets<P>(indir: P, follow_links: bool) -> Result<HashMap<String, Target>>
+fn build_targets<P>(indir: P, follow_links: bool) -> Result<HashMap<TargetName, Target>>
 where
     P: AsRef<Path>,
 {
@@ -149,17 +150,19 @@ where
         .collect()
 }
 
-fn process_target(path: &Path) -> Result<(String, Target)> {
+fn process_target(path: &Path) -> Result<(TargetName, Target)> {
+    // Get the file name as a TargetName
+    let target_name = TargetName::new(
+        path.file_name()
+            .context(error::NoFileName { path })?
+            .to_str()
+            .context(error::PathUtf8 { path })?,
+    )
+    .context(error::InvalidTargetName)?;
+
     // Build a Target from the path given. If it is not a file, this will fail
     let target = Target::from_path(path).context(error::TargetFromPath { path })?;
 
-    // Get the file name as a string
-    let target_name = path
-        .file_name()
-        .context(error::NoFileName { path })?
-        .to_str()
-        .context(error::PathUtf8 { path })?
-        .to_owned();
     Ok((target_name, target))
 }
diff --git a/tuftool/src/update.rs b/tuftool/src/update.rs
index 97e3a72d..f71a53d7 100644
--- a/tuftool/src/update.rs
+++ b/tuftool/src/update.rs
@@ -151,9 +151,9 @@ impl UpdateArgs {
 
         let new_targets = build_targets(&targets_indir, self.follow)?;
 
-        for (filename, target) in new_targets {
+        for (target_name, target) in new_targets {
             editor
-                .add_target(&filename, target)
+                .add_target(target_name, target)
                 .context(error::DelegationStructure)?;
         }
     };
diff --git a/tuftool/src/update_targets.rs b/tuftool/src/update_targets.rs
index 7a9f9628..bf2541c8 100644
--- a/tuftool/src/update_targets.rs
+++ b/tuftool/src/update_targets.rs
@@ -92,8 +92,10 @@ impl UpdateTargetsArgs {
 
         let new_targets = build_targets(&targets_indir, self.follow)?;
 
-        for (filename, target) in new_targets {
-            editor.add_target(&filename, target);
+        for (target_name, target) in new_targets {
+            editor
+                .add_target(target_name, target)
+                .context(error::InvalidTargetName)?;
         }
     };
diff --git a/tuftool/tests/create_command.rs b/tuftool/tests/create_command.rs
index 320dd044..ebc0611d 100644
--- a/tuftool/tests/create_command.rs
+++ b/tuftool/tests/create_command.rs
@@ -8,7 +8,7 @@
 use chrono::{Duration, Utc};
 use std::fs::File;
 use tempfile::TempDir;
 use test_utils::dir_url;
-use tough::RepositoryLoader;
+use tough::{RepositoryLoader, TargetName};
 
 #[test]
 // Ensure we can read a repo created by the `tuftool` binary using the `tough` library
 fn create_command() {
@@ -65,16 +65,19 @@
     .unwrap();
 
     // Ensure we can read the targets
+    let file1 = TargetName::new("file1.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file1.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file1).unwrap().unwrap()),
         &b"This is an example target file."[..]
     );
+    let file2 = TargetName::new("file2.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file2.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file2).unwrap().unwrap()),
         &b"This is an another example target file."[..]
     );
+    let file3 = TargetName::new("file3.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file3.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file3).unwrap().unwrap()),
         &b"This is role1's target file."[..]
); @@ -82,9 +85,9 @@ fn create_command() { assert_eq!(repo.targets().signed.version.get(), targets_version); assert_eq!(repo.targets().signed.expires, targets_expiration); assert_eq!(repo.targets().signed.targets.len(), 3); - assert_eq!(repo.targets().signed.targets["file1.txt"].length, 31); - assert_eq!(repo.targets().signed.targets["file2.txt"].length, 39); - assert_eq!(repo.targets().signed.targets["file3.txt"].length, 28); + assert_eq!(repo.targets().signed.targets[&file1].length, 31); + assert_eq!(repo.targets().signed.targets[&file2].length, 39); + assert_eq!(repo.targets().signed.targets[&file3].length, 28); assert_eq!(repo.targets().signatures.len(), 1); // Ensure the snapshot.json file is correct diff --git a/tuftool/tests/create_repository_integration.rs b/tuftool/tests/create_repository_integration.rs index 4a3d99cc..a53f25b2 100644 --- a/tuftool/tests/create_repository_integration.rs +++ b/tuftool/tests/create_repository_integration.rs @@ -8,7 +8,7 @@ use std::env; use std::fs::File; use tempfile::TempDir; use test_utils::dir_url; -use tough::RepositoryLoader; +use tough::{RepositoryLoader, TargetName}; // This file include integration tests for KeySources: tough-ssm, tough-kms and local file key. // Since the tests are run using the actual "AWS SSM and AWS KMS", you would have to configure @@ -165,16 +165,19 @@ fn create_repository(root_key: &str, auto_generate: bool) { .unwrap(); // Ensure we can read the targets + let file1 = TargetName::new("file1.txt").unwrap(); assert_eq!( - test_utils::read_to_end(repo.read_target("file1.txt").unwrap().unwrap()), + test_utils::read_to_end(repo.read_target(&file1).unwrap().unwrap()), &b"This is an example target file."[..] ); + let file2 = TargetName::new("file2.txt").unwrap(); assert_eq!( - test_utils::read_to_end(repo.read_target("file2.txt").unwrap().unwrap()), + test_utils::read_to_end(repo.read_target(&file2).unwrap().unwrap()), &b"This is an another example target file."[..] ); + let file3 = TargetName::new("file3.txt").unwrap(); assert_eq!( - test_utils::read_to_end(repo.read_target("file3.txt").unwrap().unwrap()), + test_utils::read_to_end(repo.read_target(&file3).unwrap().unwrap()), &b"This is role1's target file."[..] 
);
 
@@ -182,9 +185,9 @@ fn create_repository(root_key: &str, auto_generate: bool) {
     assert_eq!(repo.targets().signed.version.get(), targets_version);
     assert_eq!(repo.targets().signed.expires, targets_expiration);
     assert_eq!(repo.targets().signed.targets.len(), 3);
-    assert_eq!(repo.targets().signed.targets["file1.txt"].length, 31);
-    assert_eq!(repo.targets().signed.targets["file2.txt"].length, 39);
-    assert_eq!(repo.targets().signed.targets["file3.txt"].length, 28);
+    assert_eq!(repo.targets().signed.targets[&file1].length, 31);
+    assert_eq!(repo.targets().signed.targets[&file2].length, 39);
+    assert_eq!(repo.targets().signed.targets[&file3].length, 28);
     assert_eq!(repo.targets().signatures.len(), 1);
 
     // Ensure the snapshot.json file is correct
diff --git a/tuftool/tests/delegation_commands.rs b/tuftool/tests/delegation_commands.rs
index a7ff4838..a6f917ba 100644
--- a/tuftool/tests/delegation_commands.rs
+++ b/tuftool/tests/delegation_commands.rs
@@ -9,7 +9,7 @@
 use std::fs::File;
 use std::path::Path;
 use tempfile::TempDir;
 use test_utils::dir_url;
-use tough::RepositoryLoader;
+use tough::{RepositoryLoader, TargetName};
 
 fn create_repo<P: AsRef<Path>>(repo_dir: P) {
     let timestamp_expiration = Utc::now().checked_add_signed(Duration::days(1)).unwrap();
@@ -423,8 +423,9 @@ fn update_target_command() {
     .unwrap();
 
     // Make sure we can read new target
+    let file4 = TargetName::new("file4.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file4.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file4).unwrap().unwrap()),
         &b"This is an example target file."[..]
     );
 }
@@ -1430,3 +1431,233 @@ fn remove_role_recursive_command() {
     assert!(repo.delegated_role("A").is_none());
     assert!(repo.delegated_role("B").is_none());
 }
+
+#[test]
+/// Ensure that we percent-encode path traversal characters when adding a role name such as
+/// `../../strange/role/../name` and that we don't write files in unexpected places.
+fn dubious_role_name() {
+    let dubious_role_name = "../../strange/role/../name";
+    let dubious_name_encoded = "..%2F..%2Fstrange%2Frole%2F..%2Fname";
+    let funny_role_name = "../🍺/( ͡° ͜ʖ ͡°)";
+    let funny_name_encoded =
+        "..%2F%F0%9F%8D%BA%2F%28%20%CD%A1%C2%B0%20%CD%9C%CA%96%20%CD%A1%C2%B0%29";
+    let root_json = test_utils::test_data().join("simple-rsa").join("root.json");
+    let root_key = test_utils::test_data().join("snakeoil.pem");
+    let targets_key = test_utils::test_data().join("targetskey");
+    let targets_key1 = test_utils::test_data().join("targetskey-1");
+    let repo_dir = TempDir::new().unwrap();
+
+    // Set new expiration dates and version numbers for the update command
+    let new_timestamp_expiration = Utc::now().checked_add_signed(Duration::days(4)).unwrap();
+    let new_timestamp_version: u64 = 310;
+    let new_snapshot_expiration = Utc::now().checked_add_signed(Duration::days(5)).unwrap();
+    let new_snapshot_version: u64 = 250;
+    let new_targets_expiration = Utc::now().checked_add_signed(Duration::days(6)).unwrap();
+    let new_targets_version: u64 = 170;
+
+    // Create a repo using tuftool and the reference tuf implementation data
+    create_repo(repo_dir.path());
+
+    // Set new expiration date for the new role
+    let expiration = Utc::now().checked_add_signed(Duration::days(4)).unwrap();
+    let metadata_base_url = &dir_url(repo_dir.path().join("metadata"));
+    let meta_out = TempDir::new().unwrap();
+
+    // create the role with the dubious name
+    Command::cargo_bin("tuftool")
+        .unwrap()
+        .args(&[
+            "delegation",
+            "--signing-role",
+            dubious_role_name,
+            "create-role",
+            "-o",
+            meta_out.path().to_str().unwrap(),
+            "-k",
+            targets_key.to_str().unwrap(),
+            "-e",
+            expiration.to_rfc3339().as_str(),
+            "-v",
+            "1",
+        ])
+        .assert()
+        .success();
+
+    let new_repo_dir = TempDir::new().unwrap();
+    // add role to targets metadata and sign entire repo
+    Command::cargo_bin("tuftool")
+        .unwrap()
+        .args(&[
+            "delegation",
+            "--signing-role",
+            "targets",
+            "add-role",
+            "-o",
+            new_repo_dir.path().to_str().unwrap(),
+            "-i",
+            dir_url(&meta_out.path().join("metadata")).as_str(),
+            "-k",
+            root_key.to_str().unwrap(),
+            "--root",
+            root_json.to_str().unwrap(),
+            "--metadata-url",
+            metadata_base_url.as_str(),
+            "-e",
+            expiration.to_rfc3339().as_str(),
+            "--delegated-role",
+            dubious_role_name,
+            "-t",
+            "1",
+            "-v",
+            "2",
+            "--sign-all",
+            "--snapshot-expires",
+            new_snapshot_expiration.to_rfc3339().as_str(),
+            "--snapshot-version",
+            format!("{}", new_snapshot_version).as_str(),
+            "--timestamp-expires",
+            new_timestamp_expiration.to_rfc3339().as_str(),
+            "--timestamp-version",
+            format!("{}", new_timestamp_version).as_str(),
+        ])
+        .assert()
+        .success();
+
+    // Load the updated repo
+    let updated_metadata_base_url = &dir_url(new_repo_dir.path().join("metadata"));
+    let updated_targets_base_url = &dir_url(new_repo_dir.path().join("targets"));
+    let repo = RepositoryLoader::new(
+        File::open(&root_json).unwrap(),
+        updated_metadata_base_url.clone(),
+        updated_targets_base_url.clone(),
+    )
+    .load()
+    .unwrap();
+    // Make sure the dubiously named role was added
+    assert!(repo.delegated_role(dubious_role_name).is_some());
+
+    let create_out = TempDir::new().unwrap();
+    // create the role with the funny name
+    Command::cargo_bin("tuftool")
+        .unwrap()
+        .args(&[
+            "delegation",
+            "--signing-role",
+            funny_role_name,
+            "create-role",
+            "-o",
+            create_out.path().to_str().unwrap(),
+            "-k",
+            targets_key1.to_str().unwrap(),
+            "-e",
+            expiration.to_rfc3339().as_str(),
+            "-v",
+            "1",
+        ])
+        .assert()
+        .success();
+
+    let add_b_out = TempDir::new().unwrap();
+    // add the funny-named role to the dubious role's metadata and sign that metadata
+    Command::cargo_bin("tuftool")
+        .unwrap()
+        .args(&[
+            "delegation",
+            "--signing-role",
+            dubious_role_name,
+            "add-role",
+            "-o",
+            add_b_out.path().to_str().unwrap(),
+            "-i",
+            dir_url(&create_out.path().join("metadata")).as_str(),
+            "-k",
+            targets_key.to_str().unwrap(),
+            "--root",
+            root_json.to_str().unwrap(),
+            "--metadata-url",
+            updated_metadata_base_url.as_str(),
+            "-e",
+            expiration.to_rfc3339().as_str(),
+            "--delegated-role",
+            funny_role_name,
+            "-t",
+            "1",
+            "-v",
+            "2",
+        ])
+        .assert()
+        .success();
+
+    // Make sure the metadata files are in the right directory
+    assert!(add_b_out
+        .path()
+        .join("metadata")
+        .join(format!("{}.json", dubious_name_encoded))
+        .is_file());
+    assert!(add_b_out
+        .path()
+        .join("metadata")
+        .join(format!("{}.json", funny_name_encoded))
+        .is_file());
+
+    // update repo with new metadata
+
+    let update_out = TempDir::new().unwrap();
+
+    // Update the repo we just created
+    Command::cargo_bin("tuftool")
+        .unwrap()
+        .args(&[
+            "update",
+            "-o",
+            update_out.path().to_str().unwrap(),
+            "-k",
+            root_key.to_str().unwrap(),
+            "--root",
+            root_json.to_str().unwrap(),
+            "--metadata-url",
+            updated_metadata_base_url.as_str(),
+            "--targets-expires",
+            new_targets_expiration.to_rfc3339().as_str(),
+            "--targets-version",
+            format!("{}", new_targets_version).as_str(),
+            "--snapshot-expires",
+            new_snapshot_expiration.to_rfc3339().as_str(),
+            "--snapshot-version",
+            format!("{}", new_snapshot_version).as_str(),
+            "--timestamp-expires",
+            new_timestamp_expiration.to_rfc3339().as_str(),
+            "--timestamp-version",
+            format!("{}", new_timestamp_version).as_str(),
+            "--role",
+            dubious_role_name,
+            "-i",
+            dir_url(&add_b_out.path().join("metadata")).as_str(),
+        ])
+        .assert()
+        .success();
+
+    // Load the updated repo
+    let repo = RepositoryLoader::new(
+        File::open(root_json).unwrap(),
+        dir_url(update_out.path().join("metadata")),
+        dir_url(update_out.path().join("targets")),
+    )
+    .load()
+    .unwrap();
+
+    // Make sure the funny-named role was added
+    assert!(repo.delegated_role(funny_role_name).is_some());
+
+    // Make sure the metadata files are in the right directory
+    assert!(update_out
+        .path()
+        .join("metadata")
+        .join(format!("{}.{}.json", 2, dubious_name_encoded))
+        .is_file());
+    assert!(update_out
+        .path()
+        .join("metadata")
+        .join(format!("{}.{}.json", 1, funny_name_encoded))
+        .is_file());
+}
diff --git a/tuftool/tests/download_command.rs b/tuftool/tests/download_command.rs
index ce2f570a..89b09584 100644
--- a/tuftool/tests/download_command.rs
+++ b/tuftool/tests/download_command.rs
@@ -3,8 +3,8 @@
 mod test_utils;
 use assert_cmd::assert::Assert;
 use assert_cmd::Command;
 use httptest::{matchers::*, responders::*, Expectation, Server};
-use std::fs::{read_to_string, OpenOptions};
-use std::io::Write;
+use std::fs::read_to_string;
+use std::path::Path;
 use std::str::FromStr;
 use tempfile::TempDir;
 use url::Url;
@@ -14,7 +14,7 @@
 fn create_successful_get(relative_path: &str) -> httptest::Expectation {
     let repo_dir = test_utils::test_data().join("tuf-reference-impl");
     let file_bytes = std::fs::read(&repo_dir.join(relative_path)).unwrap();
     Expectation::matching(request::method_path("GET", format!("/{}", relative_path)))
-        .times(2)
+        .times(1)
         .respond_with(
             status_code(200)
                 .append_header("content-type", "application/octet-stream")
@@ -28,13 +28,13 @@
 
 /// S3 returns `403 Forbidden` when requesting a file that does not exist.
 fn create_unsuccessful_get(relative_path: &str) -> httptest::Expectation {
     Expectation::matching(request::method_path("GET", format!("/{}", relative_path)))
-        .times(2)
+        .times(1)
         .respond_with(status_code(403))
 }
 
 /// Asserts that the named file in `outdir` exactly matches the file in `tuf-reference-impl/targets`
-fn assert_file_match(outdir: &TempDir, filename: &str) {
-    let got = read_to_string(outdir.path().join(filename)).unwrap();
+fn assert_file_match(outdir: &Path, filename: &str) {
+    let got = read_to_string(outdir.join(filename)).unwrap();
     let want = read_to_string(
         test_utils::test_data()
             .join("tuf-reference-impl")
@@ -46,7 +46,8 @@
 }
 
 fn download_command(metadata_base_url: Url, targets_base_url: Url) {
-    let outdir = TempDir::new().unwrap();
+    let tempdir = TempDir::new().unwrap();
+    let outdir = tempdir.path().join("outdir");
     let root_json = test_utils::test_data()
         .join("tuf-reference-impl")
         .join("metadata")
@@ -63,7 +64,7 @@
             metadata_base_url.as_str(),
             "--targets-url",
             targets_base_url.as_str(),
-            outdir.path().to_str().unwrap(),
+            outdir.to_str().unwrap(),
         ])
        .assert()
        .success();
@@ -72,15 +73,7 @@
     assert_file_match(&outdir, "file1.txt");
     assert_file_match(&outdir, "file2.txt");
 
-    // Add "bloop" to the end of file1.txt so that we can prove that the file is truncated when we
-    // download the repo a second time into the same outdir.
-    let mut f = OpenOptions::write(&mut OpenOptions::new(), true)
-        .append(true)
-        .open(outdir.path().join("file1.txt"))
-        .unwrap();
-    writeln!(f, "bloop").unwrap();
-
-    // Download again into the same outdir
+    // Download again into the same outdir; this will fail because the directory exists.
     Command::cargo_bin("tuftool")
         .unwrap()
         .args(&[
@@ -91,20 +84,16 @@
             metadata_base_url.as_str(),
             "--targets-url",
             targets_base_url.as_str(),
-            outdir.path().to_str().unwrap(),
+            outdir.to_str().unwrap(),
         ])
         .assert()
-        .success();
-
-    // Assert the files are exactly correct
-    assert_file_match(&outdir, "file1.txt");
-    assert_file_match(&outdir, "file2.txt");
+        .failure();
 }
 
 #[test]
-// Ensure that the download command works with http url, and that we truncate files when downloading into a non-
-// empty directory (i.e. that issue #173 is fixed).
-fn download_command_truncates_http() {
+// Ensure that the download command works with http transport, and that we require outdir to
+// not exist.
+fn download_http_transport() {
     let server = Server::run();
     server.expect(create_successful_get("metadata/role1.json"));
     server.expect(create_successful_get("metadata/role2.json"));
@@ -120,16 +109,16 @@
 }
 
 #[test]
-// Ensure that the download command works with file url, and that we truncate files when downloading into a non-
-// empty directory (i.e. that issue #173 is fixed).
-fn download_command_truncates_file() {
+// Ensure that the download command works with file transport, and that we require outdir to
+// not exist.
+fn download_file_transport() { let repo_dir = test_utils::test_data().join("tuf-reference-impl"); let metadata_base_url = test_utils::dir_url(repo_dir.join("metadata").to_str().unwrap()); let targets_base_url = test_utils::dir_url(repo_dir.join("targets").to_str().unwrap()); download_command(metadata_base_url, targets_base_url); } -fn download_expired_repo(outdir: &TempDir, repo_dir: &TempDir, allow_expired_repo: bool) -> Assert { +fn download_expired_repo(outdir: &Path, repo_dir: &TempDir, allow_expired_repo: bool) -> Assert { let root_json = test_utils::test_data().join("simple-rsa").join("root.json"); let metadata_base_url = &test_utils::dir_url(repo_dir.path().join("metadata")); let targets_base_url = &test_utils::dir_url(repo_dir.path().join("targets")); @@ -142,7 +131,7 @@ fn download_expired_repo(outdir: &TempDir, repo_dir: &TempDir, allow_expired_rep metadata_base_url.as_str(), "--targets-url", targets_base_url.as_str(), - outdir.path().to_str().unwrap(), + outdir.to_str().unwrap(), ]); if allow_expired_repo { cmd.arg("--allow-expired-repo").assert() @@ -159,13 +148,14 @@ fn download_command_expired_repo_fail() { // Create a expired repo using tuftool test_utils::create_expired_repo(repo_dir.path()); // assert failure for download command - download_expired_repo(&outdir, &repo_dir, false).failure(); + download_expired_repo(outdir.path(), &repo_dir, false).failure(); } #[test] // Ensure download command is successful when metadata has expired but --allow-expired-repo flag is passed fn download_command_expired_repo_allow() { - let outdir = TempDir::new().unwrap(); + let tempdir = TempDir::new().unwrap(); + let outdir = tempdir.path().join("outdir"); let repo_dir = TempDir::new().unwrap(); // Create a expired repo using tuftool test_utils::create_expired_repo(repo_dir.path()); @@ -175,3 +165,28 @@ fn download_command_expired_repo_allow() { assert_file_match(&outdir, "file1.txt"); assert_file_match(&outdir, "file2.txt"); } + +#[test] +// Ensure that we handle path-like target names correctly. 
+fn download_safe_target_paths() {
+    let repo_dir = test_utils::test_data().join("safe-target-paths");
+    let root = repo_dir.join("metadata").join("1.root.json");
+    let metadata_base_url = &test_utils::dir_url(repo_dir.join("metadata"));
+    let targets_base_url = &test_utils::dir_url(repo_dir.join("targets"));
+    let tempdir = TempDir::new().unwrap();
+    let outdir = tempdir.path().join("outdir");
+    let mut cmd = Command::cargo_bin("tuftool").unwrap();
+    cmd.args(&[
+        "download",
+        "-r",
+        root.to_str().unwrap(),
+        "--metadata-url",
+        metadata_base_url.as_str(),
+        "--targets-url",
+        targets_base_url.as_str(),
+        outdir.to_str().unwrap(),
+    ]);
+    cmd.assert().success();
+    assert!(outdir.join("data1.txt").is_file());
+    assert!(outdir.join("foo/bar/data2.txt").is_file());
+}
diff --git a/tuftool/tests/update_command.rs b/tuftool/tests/update_command.rs
index 8c7a6fd8..284e1e1a 100644
--- a/tuftool/tests/update_command.rs
+++ b/tuftool/tests/update_command.rs
@@ -10,7 +10,7 @@
 use std::fs::File;
 use std::path::Path;
 use tempfile::TempDir;
 use test_utils::dir_url;
-use tough::RepositoryLoader;
+use tough::{RepositoryLoader, TargetName};
 
 fn create_repo<P: AsRef<Path>>(repo_dir: P) {
     let timestamp_expiration = Utc::now().checked_add_signed(Duration::days(1)).unwrap();
@@ -191,16 +191,19 @@ fn update_command_with_new_targets() {
     assert_eq!(repo.targets().signed.targets.len(), 6);
 
     // Ensure we can read the newly added targets
+    let file4 = TargetName::new("file4.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file4.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file4).unwrap().unwrap()),
         &b"This is an example target file."[..]
     );
+    let file5 = TargetName::new("file5.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file5.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file5).unwrap().unwrap()),
         &b"This is another example target file."[..]
     );
+    let file6 = TargetName::new("file6.txt").unwrap();
     assert_eq!(
-        test_utils::read_to_end(repo.read_target("file6.txt").unwrap().unwrap()),
+        test_utils::read_to_end(repo.read_target(&file6).unwrap().unwrap()),
         &b"This is yet another example target file."[..]
     );
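
A note for reviewers: taken together, the tuftool 0.7.0 changes above close two path-traversal vectors. Target names are parsed into `tough::TargetName` and written through `Repository::save_target` rather than being joined blindly onto `outdir`, and delegated role names are percent-encoded before they are used as metadata file names. The standalone sketch below distills both ideas; it is illustrative only, not tough's implementation. `role_file_name`, `safe_join`, and the `ESCAPED` set are hypothetical names, though the `percent-encoding` crate itself is a real dependency added to `tough` in this change.

```rust
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
use std::path::{Component, Path, PathBuf};

// Hypothetical escape set: '/' and '\' must be encoded so that a role name can
// never name a subdirectory, and '%' so that encoding round-trips. The set
// tough actually uses may differ.
const ESCAPED: &AsciiSet = &CONTROLS.add(b'/').add(b'\\').add(b'%');

/// Turn a delegated role name into a single flat, safe metadata file name.
fn role_file_name(role_name: &str) -> String {
    format!("{}.json", utf8_percent_encode(role_name, ESCAPED))
}

/// Join a target name onto `outdir`, refusing anything that could escape it.
/// This mirrors the idea behind `TargetName` validation: a name is data, not
/// a path, until every component has been checked.
fn safe_join(outdir: &Path, name: &str) -> Option<PathBuf> {
    let rel = Path::new(name);
    let all_normal = rel.components().all(|c| matches!(c, Component::Normal(_)));
    if name.is_empty() || rel.is_absolute() || !all_normal {
        None // rejects "", "/abs/path", "../evil", "a/../b", and drive prefixes
    } else {
        Some(outdir.join(rel))
    }
}

fn main() {
    // The dubious role name from the test above becomes one flat file name.
    assert_eq!(
        role_file_name("../../strange/role/../name"),
        "..%2F..%2Fstrange%2Frole%2F..%2Fname.json"
    );

    // Path-like target names stay under outdir; traversal is refused.
    let outdir = Path::new("outdir");
    assert_eq!(
        safe_join(outdir, "foo/bar/data2.txt"),
        Some(PathBuf::from("outdir/foo/bar/data2.txt"))
    );
    assert_eq!(safe_join(outdir, "../delegated/../evil.txt"), None);
}
```

tough's real `save_target` additionally takes a `Prefix` argument (the download code above passes `Prefix::None`) and performs its own validation, so the sketch only captures the invariant: names are parsed and checked before they ever touch the filesystem.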