diff --git a/Cargo.lock b/Cargo.lock index a2950e07..f26e888f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,41 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "aead" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" +dependencies = [ + "generic-array", +] + +[[package]] +name = "aes" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" +dependencies = [ + "cfg-if 1.0.0", + "cipher", + "cpufeatures", + "opaque-debug", +] + +[[package]] +name = "aes-gcm" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + [[package]] name = "ahash" version = "0.7.6" @@ -245,6 +280,21 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "1.3.2" @@ -259,6 +309,7 @@ checksum = "1489fcb93a5bb47da0462ca93ad252ad6af2145cce58d10d46a83931ba9f016b" dependencies = [ "funty", "radium", + "serde", "tap", "wyz", ] @@ -402,6 +453,15 @@ dependencies = [ "unsigned-varint", ] +[[package]] +name = "cipher" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" +dependencies = [ + "generic-array", +] + [[package]] name = "cloudabi" version = "0.0.3" @@ -484,6 +544,15 @@ dependencies = [ "syn", ] +[[package]] +name = "ctr" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" +dependencies = [ + "cipher", +] + [[package]] name = "data-encoding" version = "2.3.2" @@ -703,6 +772,16 @@ dependencies = [ "wasi 0.11.0+wasi-snapshot-preview1", ] +[[package]] +name = "ghash" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1583cc1656d7839fd3732b80cf4f38850336cdb9b8ded1cd399ca62958de3c99" +dependencies = [ + "opaque-debug", + "polyval", +] + [[package]] name = "gloo-timers" version = "0.2.4" @@ -1033,6 +1112,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + [[package]] name = "parking" version = "2.0.0" @@ -1097,6 +1182,18 @@ dependencies = [ "winapi", ] +[[package]] +name = "polyval" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + [[package]] name = "ppv-lite86" version = "0.2.16" @@ -1146,6 +1243,26 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proptest" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" +dependencies = [ + "bit-set", + "bitflags", + "byteorder", + "lazy_static", + "num-traits", + "quick-error 2.0.1", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_xorshift 0.3.0", + "regex-syntax", + "rusty-fork", + "tempfile", +] + [[package]] name = "prost" version = "0.9.0" @@ -1199,6 +1316,18 @@ dependencies = [ "prost", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + [[package]] name = "quote" version = "1.0.20" @@ -1228,7 +1357,7 @@ dependencies = [ "rand_isaac", "rand_os", "rand_pcg", - "rand_xorshift", + "rand_xorshift 0.1.1", "winapi", ] @@ -1338,6 +1467,15 @@ dependencies = [ "rand_core 0.3.1", ] +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core 0.6.3", +] + [[package]] name = "rdrand" version = "0.4.0" @@ -1382,6 +1520,18 @@ dependencies = [ "winapi", ] +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error 1.2.3", + "tempfile", + "wait-timeout", +] + [[package]] name = "ryu" version = "1.0.10" @@ -1499,6 +1649,7 @@ dependencies = [ "base64", "hex", "rand 0.6.3", + "serde", "sha3", ] @@ -1524,6 +1675,35 @@ dependencies = [ "winapi", ] +[[package]] +name = "structmeta" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bd9c2155aa89fb2c2cb87d99a610c689e7c47099b3e9f1c8a8f53faf4e3d2e3" +dependencies = [ + "proc-macro2", + "quote", + "structmeta-derive", + "syn", +] + +[[package]] +name = "structmeta-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bafede0d0a2f21910f36d47b1558caae3076ed80f6f3ad0fc85a91e6ba7e5938" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + [[package]] name = "syn" version = "1.0.98" @@ -1587,6 +1767,18 @@ dependencies = [ "syn", ] +[[package]] +name = "test-strategy" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c18b325ca048e49683d5cb9166a50191fc862e36020706bbd7723c22a05d4ffa" +dependencies = [ + "proc-macro2", + "quote", + "structmeta", + "syn", +] + [[package]] name = "thiserror" version = "1.0.31" @@ -1651,6 +1843,16 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +[[package]] +name = "universal-hash" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" +dependencies = [ + "generic-array", + "subtle", +] + [[package]] name = "unsigned-varint" version = "0.7.1" @@ -1673,6 +1875,15 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.1.0" @@ -1785,7 +1996,7 @@ dependencies = [ [[package]] name = "wasm-wnfs" -version = "0.1.7" +version = "0.1.8" dependencies = [ "anyhow", "async-trait", @@ -1919,8 +2130,9 @@ checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" [[package]] name = "wnfs" -version = "0.1.7" +version = "0.1.8" dependencies = [ + "aes-gcm", "anyhow", "async-once-cell", "async-recursion", @@ -1937,6 +2149,7 @@ dependencies = [ "libipld", "log", "multihash", + "proptest", "rand 0.8.5", "semver", "serde", @@ -1944,6 +2157,7 @@ dependencies = [ "sha3", "skip_ratchet", "test-log", + "test-strategy", "thiserror", "xxhash-rust", ] diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 2e173cd9..a53c3ffa 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "wnfs" -version = "0.1.7" +version = "0.1.8" description = "WebNative filesystem core implementation" keywords = ["wnfs", "webnative", "ipfs", "decentralisation"] categories = [ @@ -17,7 +17,7 @@ homepage = "https://fission.codes" authors = ["The Fission Authors"] [dependencies] -libipld = { version = "0.13.1", features = ["dag-cbor", "derive", "serde-codec" ] } +libipld = { version = "0.13.1", features = ["dag-cbor", "derive", "serde-codec"] } serde_repr = "0.1" serde = { version = "1.0.137", features = ["rc"]} multihash = "0.16.2" @@ -31,19 +31,22 @@ async-recursion = "1.0.0" futures = "0.3.21" async-stream = "0.3.3" futures-util = "0.3.21" -skip_ratchet = "0.1.0" -bitvec = "1.0.0" +skip_ratchet = { version = "0.1.0", features = ["serde"] } +bitvec = { version = "1.0.0", features = ["serde"] } async-once-cell = "0.4.0" sha3 = "0.10.0" log = "0.4.17" xxhash-rust = { version = "0.8.5", features = ["xxh3"] } lazy_static = "1.4.0" thiserror = "1.0.31" +aes-gcm = "0.9.4" [dev-dependencies] env_logger = "0.9.0" test-log = "0.2.10" rand = "0.8.5" +proptest = "1.0.0" +test-strategy = "0.2.0" [lib] path = "lib.rs" diff --git a/crates/fs/common/async_serialize.rs b/crates/fs/common/async_serialize.rs new file mode 100644 index 00000000..fac0bd66 --- /dev/null +++ b/crates/fs/common/async_serialize.rs @@ -0,0 +1,101 @@ +use std::rc::Rc; + +use async_trait::async_trait; +use libipld::{error::SerdeError, serde as ipld_serde, Cid, Ipld}; +use serde::Serialize; +use serde::Serializer; + +use super::ReferenceableStore; + +//-------------------------------------------------------------------------------------------------- +// Macros +//-------------------------------------------------------------------------------------------------- + +// NOTE: For now, we only implement AsyncSerialize. +macro_rules! impl_async_serialize { + ( $( $ty:ty $( : < $( $generics:ident ),+ > )? 
),+ ) => { + $( + #[async_trait(?Send)] + impl $( < $( $generics ),+ > )? AsyncSerialize for $ty $( where $( $generics: Serialize ),+ )? { + type StoreRef = Cid; + + async fn async_serialize + ?Sized>( + &self, + serializer: S, + _: &mut RS, + ) -> Result { + self.serialize(serializer) + } + } + )+ + }; +} + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// A **data structure** that can be serialized into any data format supported +/// by Serde. +/// +/// This trait is slightly different from Serde's Serialize trait because it allows for asynchronous +/// serialisation and it is designed for the IPLD ecosystem where a `Store` is sometimes needed to +/// properly resolve the internal state of certain data structures to Cids. +/// +/// An example of this is the PublicDirectory which can contain links to other IPLD nodes. +/// These links need to be resolved to Cids during serialization if they aren't already. +#[async_trait(?Send)] +pub trait AsyncSerialize { + type StoreRef; + + /// Serializes the type. + async fn async_serialize( + &self, + serializer: S, + store: &mut RS, + ) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized; + + /// Serialize with an IPLD serializer. + async fn async_serialize_ipld(&self, store: &mut RS) -> Result + where + RS: ReferenceableStore + ?Sized, + { + self.async_serialize(ipld_serde::Serializer, store).await + } +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +#[async_trait(?Send)] +impl AsyncSerialize for Rc { + type StoreRef = T::StoreRef; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { + self.as_ref().async_serialize(serializer, store).await + } +} + +impl_async_serialize! { usize, u128, u64, u32, u16, u8, isize, i128, i64, i32, i16, i8 } +impl_async_serialize! { String, &str } +impl_async_serialize! 
{ + (A,): , + (A, B): , + (A, B, C): , + (A, B, C, D): , + (A, B, C, D, E): , + (A, B, C, D, E, F): , + (A, B, C, D, E, F, G): , + (A, B, C, D, E, F, G, H): , + (A, B, C, D, E, F, G, H, I): , + (A, B, C, D, E, F, G, H, I, J): , + (A, B, C, D, E, F, G, H, I, J, K): +} diff --git a/crates/fs/common/blockstore.rs b/crates/fs/common/blockstore.rs index bd223d13..64ea9ec1 100644 --- a/crates/fs/common/blockstore.rs +++ b/crates/fs/common/blockstore.rs @@ -14,7 +14,10 @@ use libipld::{ use multihash::{Code, MultihashDigest}; use serde::{de::DeserializeOwned, Serialize}; -use crate::AsyncSerialize; +use crate::{ + private::{Key, Rng, NONCE_SIZE}, + AsyncSerialize, ReferenceableStore, +}; use super::FsError; @@ -28,7 +31,7 @@ pub trait BlockStore { async fn get_block<'a>(&'a self, cid: &Cid) -> Result>>; async fn put_block(&mut self, bytes: Vec, codec: IpldCodec) -> Result; - async fn put_serializable(&mut self, value: &S) -> Result { + async fn put_serializable(&mut self, value: &V) -> Result { let ipld = ipld_serde::to_ipld(value)?; let mut bytes = Vec::new(); @@ -37,7 +40,27 @@ pub trait BlockStore { self.put_block(bytes, IpldCodec::DagCbor).await } - async fn put_async_serializable(&mut self, value: &S) -> Result { + async fn put_private_serializable( + &mut self, + value: &V, + key: &Key, + rng: &mut R, + ) -> Result + where + V: Serialize, + R: Rng, + { + let ipld = ipld_serde::to_ipld(value)?; + let mut bytes = Vec::new(); + ipld.encode(DagCborCodec, &mut bytes)?; + let enc_bytes = key.encrypt(&rng.random_bytes::(), &bytes)?; + self.put_block(enc_bytes, IpldCodec::DagCbor).await + } + + async fn put_async_serializable>( + &mut self, + value: &V, + ) -> Result { let ipld = value.async_serialize_ipld(self).await?; let mut bytes = Vec::new(); @@ -46,10 +69,21 @@ pub trait BlockStore { self.put_block(bytes, IpldCodec::DagCbor).await } - async fn get_deserializable<'a, D: DeserializeOwned>(&'a self, cid: &Cid) -> Result { + async fn get_deserializable<'a, V: DeserializeOwned>(&'a self, cid: &Cid) -> Result { let bytes = self.get_block(cid).await?; let ipld = Ipld::decode(DagCborCodec, &mut Cursor::new(bytes.as_ref()))?; - Ok(ipld_serde::from_ipld::(ipld)?) + Ok(ipld_serde::from_ipld::(ipld)?) + } + + async fn get_private_deserializable<'a, V: DeserializeOwned>( + &'a self, + cid: &Cid, + key: &Key, + ) -> Result { + let enc_bytes = self.get_block(cid).await?; + let bytes = key.decrypt(enc_bytes.as_ref())?; + let ipld = Ipld::decode(DagCborCodec, &mut Cursor::new(bytes))?; + Ok(ipld_serde::from_ipld::(ipld)?) 
} } @@ -93,6 +127,22 @@ impl BlockStore for MemoryBlockStore { } } +#[async_trait(?Send)] +impl ReferenceableStore for T { + type Ref = Cid; + + async fn get_value(&self, reference: &Self::Ref) -> Result { + self.get_deserializable(reference).await + } + + async fn put_value>( + &mut self, + value: &V, + ) -> Result { + self.put_async_serializable(value).await + } +} + //-------------------------------------------------------------------------------------------------- // Functions //-------------------------------------------------------------------------------------------------- diff --git a/crates/fs/common/constants.rs b/crates/fs/common/constants.rs deleted file mode 100644 index 8ac08e48..00000000 --- a/crates/fs/common/constants.rs +++ /dev/null @@ -1 +0,0 @@ -pub const HASH_BYTE_SIZE: usize = 32; diff --git a/crates/fs/common/encoding.rs b/crates/fs/common/encoding.rs index 42a1891a..cf0f582d 100644 --- a/crates/fs/common/encoding.rs +++ b/crates/fs/common/encoding.rs @@ -7,7 +7,7 @@ pub mod dagcbor { use libipld::{ cbor::DagCborCodec, codec::{Decode, Encode}, - serde as ipld_serde, Ipld, + serde as ipld_serde, Cid, Ipld, }; use serde::{de::DeserializeOwned, Serialize}; @@ -22,8 +22,8 @@ pub mod dagcbor { } /// Encodes an async serializable value into DagCbor bytes. - pub async fn async_encode( - value: &S, + pub async fn async_encode, B: BlockStore>( + value: &V, store: &mut B, ) -> Result> { let ipld = value.async_serialize_ipld(store).await?; diff --git a/crates/fs/common/error.rs b/crates/fs/common/error.rs index 88c5376b..f6e299a8 100644 --- a/crates/fs/common/error.rs +++ b/crates/fs/common/error.rs @@ -1,33 +1,57 @@ //! File system errors. -use std::error::Error; - use anyhow::Result; use thiserror::Error; /// File system errors. -#[derive(Debug, Clone, PartialEq, Eq, Error)] +#[derive(Debug, Error)] pub enum FsError { #[error("Cannot find a node with the specified CID in block store")] CIDNotFoundInBlockstore, + #[error("Invalid WNFS path")] InvalidPath, + #[error("Expected a file")] NotAFile, + #[error("Expected a directory")] NotADirectory, + #[error("Cannot find file or directory")] NotFound, + #[error("File already exists")] FileAlreadyExists, + #[error("Directory already exists")] DirectoryAlreadyExists, + #[error("Move operation on invalid path")] InvalidMoveLocation, + #[error("Cannot decide cbor data")] UndecodableCborData(String), + + #[error("Unable to encrypt data: {0}")] + UnableToEncrypt(String), + + #[error("Unable to decrypt data: {0}")] + UnableToDecrypt(String), + + #[error("Invalid deserialization: {0}")] + InvalidDeserialization(String), + + #[error("Cannot access header data necessary for operation")] + MissingHeader, + + #[error("Expected encrypted ratchet key")] + ExpectEncryptedRatchetKey, + + #[error("Expected bare ratchet key")] + ExpectBareRatchetKey, } -pub fn error(err: impl Error + Send + Sync + 'static) -> Result { +pub fn error(err: impl std::error::Error + Send + Sync + 'static) -> Result { Err(err.into()) } diff --git a/crates/fs/common/link.rs b/crates/fs/common/link.rs index d044687c..76ef74e5 100644 --- a/crates/fs/common/link.rs +++ b/crates/fs/common/link.rs @@ -1,30 +1,20 @@ use anyhow::Result; -use async_once_cell::OnceCell; use async_trait::async_trait; use libipld::Cid; -use serde::de::DeserializeOwned; -use crate::AsyncSerialize; +use crate::{AsyncSerialize, Referenceable, ReferenceableStore}; use crate::{BlockStore, IpldEq}; //-------------------------------------------------------------------------------------------------- // Type 
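The new `AsyncSerialize` trait above exists so that values holding links can resolve them to Cids against a store while serializing, and the blanket `impl<T: BlockStore> ReferenceableStore for T` lets any block store play that role. A minimal sketch of a downstream implementation, assuming the crate-root re-exports used elsewhere in this PR; `Wrapper` and its field are purely illustrative, and the generic bounds are reconstructed from the trait definition above:

```rust
use async_trait::async_trait;
use libipld::Cid;
use serde::{Serialize, Serializer};
use wnfs::{AsyncSerialize, Link, ReferenceableStore};

/// Hypothetical type that serializes its child as a Cid link.
struct Wrapper {
    child: Link<String>,
}

#[async_trait(?Send)]
impl AsyncSerialize for Wrapper {
    type StoreRef = Cid;

    async fn async_serialize<S, RS>(
        &self,
        serializer: S,
        store: &mut RS,
    ) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
        RS: ReferenceableStore<Ref = Cid> + ?Sized,
    {
        // Resolve (and, if necessary, store) the child so a Cid is available,
        // then serialize that Cid in place of the inline value.
        let cid = self
            .child
            .resolve_cid(store)
            .await
            .map_err(serde::ser::Error::custom)?;
        cid.serialize(serializer)
    }
}
```

With that in place, `put_async_serializable` on any `BlockStore` (or `dagcbor::async_encode`) can persist a `Wrapper` as DAG-CBOR, since the blanket impl routes `put_value` back through `put_async_serializable`.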
Definitions //-------------------------------------------------------------------------------------------------- -/// A data structure that represents a link in the IPLD graph. Basically it is "link" to some content addressable value of `T`. +/// A data structure that represents a link in the IPLD graph. Basically it is a "link" to some content addressable value of `T`. /// /// It supports representing the "link" with a Cid or the deserialized value itself. /// /// Link needs a `BlockStore` to be able to resolve Cids to corresponding values of `T` and vice versa. -#[derive(Debug)] -pub enum Link { - /// A variant of `Link` that starts out as a Cid. - /// It supports converting the Cid to a `T` by caching it only once in `value_cache`. - Encoded { cid: Cid, value_cache: OnceCell }, - /// A variant of `Link` that starts out as a value of `T`. - /// It supports converting the value of `T` to a Cid by caching it only once in `cid_cache`. - Decoded { value: T, cid_cache: OnceCell }, -} +pub type Link = Referenceable; //-------------------------------------------------------------------------------------------------- // Implementations @@ -32,99 +22,41 @@ pub enum Link { impl Link { /// Creates a new `Link` that starts out as a Cid. + #[inline] pub fn from_cid(cid: Cid) -> Self { - Self::Encoded { - cid, - value_cache: OnceCell::new(), - } + Self::from_reference(cid) } - /// Gets an owned value from link. It attempts to it get from the store if it is not present in link. - pub async fn get_owned_value(self, store: &B) -> Result + /// Gets the Cid stored in type. It attempts to get it from the store if it is not present in type. + #[inline] + pub async fn resolve_cid<'a, RS: ReferenceableStore + ?Sized>( + &'a self, + store: &mut RS, + ) -> Result<&'a Cid> where - T: DeserializeOwned, + T: AsyncSerialize, { - match self { - Self::Encoded { - ref cid, - value_cache, - } => match value_cache.into_inner() { - Some(cached) => Ok(cached), - None => store.get_deserializable(cid).await, - }, - Self::Decoded { value, .. } => Ok(value), - } + self.resolve_reference(store).await } - /// Gets the value stored in link. + /// Gets the cid data stored in type. /// - /// NOTE: This does not attempt to get it from the store if it does not exist. - pub fn get_value(&self) -> Option<&T> { - match self { - Self::Encoded { value_cache, .. } => value_cache.get(), - Self::Decoded { value, .. } => Some(value), - } - } - - /// Gets the Cid stored in link. - /// - /// NOTE: This does not attempt to get it from the store if it does not exist. + /// NOTE: This does not attempt to get it from the store if it does not exist.. + #[inline] pub fn get_cid(&self) -> Option<&Cid> { - match self { - Self::Encoded { cid, .. } => Some(cid), - Self::Decoded { cid_cache, .. } => cid_cache.get(), - } - } - - /// Gets the value stored in link. It attempts to get it from the store if it is not present in link. - pub async fn resolve_value<'a, B: BlockStore>(&'a self, store: &B) -> Result<&'a T> - where - T: DeserializeOwned, - { - match self { - Self::Encoded { cid, value_cache } => { - value_cache - .get_or_try_init(async { store.get_deserializable(cid).await }) - .await - } - Self::Decoded { value, .. } => Ok(value), - } - } - - /// Gets the Cid stored in link. It attempts to get it from the store if it is not present in link. - pub async fn resolve_cid<'a, B: BlockStore + ?Sized>(&'a self, store: &mut B) -> Result<&'a Cid> - where - T: AsyncSerialize, - { - match self { - Self::Encoded { cid, .. 
} => Ok(cid), - Self::Decoded { value, cid_cache } => { - cid_cache - .get_or_try_init(async { store.put_async_serializable(value).await }) - .await - } - } - } - - /// Checks if there is a value stored in link. - pub fn has_value(&self) -> bool { - match self { - Self::Encoded { value_cache, .. } => value_cache.get().is_some(), - _ => true, - } + self.get_reference() } /// Checks if there is a Cid stored in link. + #[inline] pub fn has_cid(&self) -> bool { - match self { - Self::Decoded { cid_cache, .. } => cid_cache.get().is_some(), - _ => true, - } + self.has_reference() } + /// Compares two links for equality. Attempts to get them from store if they are not already cached. pub async fn deep_eq(&self, other: &Link, store: &mut B) -> Result where - T: PartialEq + AsyncSerialize, + T: PartialEq + AsyncSerialize, { if self == other { return Ok(true); @@ -135,7 +67,7 @@ impl Link { } #[async_trait(?Send)] -impl IpldEq for Link { +impl> IpldEq for Link { async fn eq(&self, other: &Link, store: &mut B) -> Result { if self == other { return Ok(true); @@ -145,61 +77,6 @@ impl IpldEq for Link { } } -impl From for Link { - fn from(value: T) -> Self { - Self::Decoded { - value, - cid_cache: OnceCell::new(), - } - } -} -impl Clone for Link -where - T: Clone, -{ - fn clone(&self) -> Self { - match self { - Link::Encoded { cid, value_cache } => Self::Encoded { - cid: *cid, - value_cache: OnceCell::new_with(value_cache.get().cloned()), - }, - Link::Decoded { value, cid_cache } => Self::Decoded { - value: value.clone(), - cid_cache: OnceCell::new_with(cid_cache.get().cloned()), - }, - } - } -} - -impl PartialEq for Link { - /// This equality check does not cover cases where one holds a Cid and the other holds a value T. - /// This is because sealing or resolving the link requires async operation which PartialEq does not expose. - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::Encoded { cid, .. }, Self::Encoded { cid: cid2, .. }) => cid == cid2, - (Self::Decoded { value, .. }, Self::Decoded { value: value2, .. }) => value == value2, - (Self::Encoded { cid, .. }, Self::Decoded { value: value2, .. }) => { - if let Some(cid2) = other.get_cid() { - cid == cid2 - } else if let Some(value) = self.get_value() { - value == value2 - } else { - false - } - } - (Self::Decoded { value, .. }, Self::Encoded { cid: cid2, .. 
}) => { - if let Some(cid) = self.get_cid() { - cid == cid2 - } else if let Some(value2) = other.get_value() { - value == value2 - } else { - false - } - } - } - } -} - //-------------------------------------------------------------------------------------------------- // Tests //-------------------------------------------------------------------------------------------------- diff --git a/crates/fs/common/mod.rs b/crates/fs/common/mod.rs index 55f75f73..d6f54cc5 100644 --- a/crates/fs/common/mod.rs +++ b/crates/fs/common/mod.rs @@ -1,19 +1,25 @@ +mod async_serialize; pub mod blockstore; -mod constants; mod encoding; mod error; -pub mod link; +mod link; mod metadata; +mod pathnodes; +mod referenceable; +pub mod utils; +pub use async_serialize::*; pub use blockstore::*; -pub use constants::*; pub use encoding::*; pub use error::*; pub use link::*; pub use metadata::*; +pub use pathnodes::*; +pub use referenceable::*; //-------------------------------------------------------------------------------------------------- // Type Definitions //-------------------------------------------------------------------------------------------------- +pub const HASH_BYTE_SIZE: usize = 32; pub type HashOutput = [u8; HASH_BYTE_SIZE]; diff --git a/crates/fs/common/pathnodes.rs b/crates/fs/common/pathnodes.rs new file mode 100644 index 00000000..82873b54 --- /dev/null +++ b/crates/fs/common/pathnodes.rs @@ -0,0 +1,77 @@ +use std::rc::Rc; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// Represents the directory nodes along a path. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PathNodes { + pub path: Vec<(Rc, String)>, + pub tail: Rc, +} + +/// The kinds of outcome from getting a `PathNodes`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathNodesResult { + /// The complete path exists. + Complete(PathNodes), + + /// The path does not exist. + MissingLink(PathNodes, String), + + /// Encountered a node that is not a directory. + NotADirectory(PathNodes, String), +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PathNodes { + /// Returns the length of the path nodes. + /// + /// # Examples + /// + /// ``` + /// use wnfs::{PathNodes, public::PublicDirectory}; + /// use std::rc::Rc; + /// use chrono::Utc; + /// + /// let nodes = PathNodes:: { + /// path: vec![ + /// (Rc::new(PublicDirectory::new(Utc::now())), "music".to_string()), + /// (Rc::new(PublicDirectory::new(Utc::now())), "rock".to_string()), + /// ], + /// tail: Rc::new(PublicDirectory::new(Utc::now())), + /// }; + /// + /// assert_eq!(nodes.len(), 2); + /// ``` + pub fn len(&self) -> usize { + self.path.len() + } + + /// Checks if the path nodes are empty. 
+ /// + /// # Examples + /// + /// ``` + /// use wnfs::{PathNodes, public::PublicDirectory}; + /// use std::rc::Rc; + /// use chrono::Utc; + /// + /// let nodes = PathNodes:: { + /// path: vec![ + /// (Rc::new(PublicDirectory::new(Utc::now())), "music".to_string()), + /// (Rc::new(PublicDirectory::new(Utc::now())), "rock".to_string()), + /// ], + /// tail: Rc::new(PublicDirectory::new(Utc::now())), + /// }; + /// + /// assert!(!nodes.is_empty()); + /// ``` + pub fn is_empty(&self) -> bool { + self.path.is_empty() + } +} diff --git a/crates/fs/common/referenceable.rs b/crates/fs/common/referenceable.rs new file mode 100644 index 00000000..7a0e5347 --- /dev/null +++ b/crates/fs/common/referenceable.rs @@ -0,0 +1,236 @@ +use anyhow::Result; +use async_once_cell::OnceCell; +use async_trait::async_trait; +use serde::de::DeserializeOwned; + +use crate::AsyncSerialize; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// This is an abstract data structure that can be used to cache reference to some data and vice versa. +/// Basically it allows some "reference" of type `R` to some addressable value of `T`. +/// +/// It supports representing the data as its reference or the value itself. +/// +/// This data structure is backed by a [ReferenceableStore](crate::ReferenceableStore) which is used to resolve the reference to the actual value. +#[derive(Debug)] +pub enum Referenceable { + /// A variant of `Referenceable` that starts out as a value of R. + /// It supports converting a reference to a value of `V` by caching it only once in `value_cache`. + Encoded { + reference: R, + value_cache: OnceCell, + }, + /// A variant of `Referenceable` that starts out as a value of `V.`. + /// It supports converting the value of `V` to a reference by caching it only once in `reference_cache`. + Decoded { + value: V, + reference_cache: OnceCell, + }, +} + +/// This represents a store that can keep values serializable values and return some reference (of type `Ref`) to them. +/// +/// References can be used to fetch the corresponding value from the store. +#[async_trait(?Send)] +pub trait ReferenceableStore { + type Ref; + + async fn get_value(&self, reference: &Self::Ref) -> Result; + async fn put_value>( + &mut self, + value: &V, + ) -> Result; +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl Referenceable { + /// Creates a new `Referenceable` that starts out as a value of `R`. + pub fn from_reference(reference: R) -> Self { + Self::Encoded { + reference, + value_cache: OnceCell::new(), + } + } + + /// Gets an owned value from type. It attempts to it get from the store if it is not present in type. + pub async fn get_owned_value>(self, store: &RS) -> Result + where + V: DeserializeOwned, + { + match self { + Self::Encoded { + ref reference, + value_cache, + } => match value_cache.into_inner() { + Some(cached) => Ok(cached), + None => store.get_value(reference).await, + }, + Self::Decoded { value, .. } => Ok(value), + } + } + + /// Gets the value stored in type. + /// + /// NOTE: This does not attempt to get it from the store if it does not exist. + pub fn get_value(&self) -> Option<&V> { + match self { + Self::Encoded { value_cache, .. 
} => value_cache.get(), + Self::Decoded { value, .. } => Some(value), + } + } + + /// Gets the reference data stored in type. + /// + /// NOTE: This does not attempt to get it from the store if it does not exist. + pub fn get_reference(&self) -> Option<&R> { + match self { + Self::Encoded { reference, .. } => Some(reference), + Self::Decoded { + reference_cache, .. + } => reference_cache.get(), + } + } + + /// Gets the value stored in link. It attempts to get it from the store if it is not present in link. + pub async fn resolve_value<'a, RS: ReferenceableStore>( + &'a self, + store: &RS, + ) -> Result<&'a V> + where + V: DeserializeOwned, + { + match self { + Self::Encoded { + reference, + value_cache, + } => { + value_cache + .get_or_try_init(async { store.get_value(reference).await }) + .await + } + Self::Decoded { value, .. } => Ok(value), + } + } + + /// Gets the reference data stored in type. It attempts to get it from the store if it is not present in type. + pub async fn resolve_reference<'a, RS: ReferenceableStore + ?Sized>( + &'a self, + store: &mut RS, + ) -> Result<&'a R> + where + V: AsyncSerialize, + { + match self { + Self::Encoded { reference, .. } => Ok(reference), + Self::Decoded { + value, + reference_cache, + } => { + reference_cache + .get_or_try_init(async { store.put_value(value).await }) + .await + } + } + } + + /// Checks if there is a value stored in link. + pub fn has_value(&self) -> bool { + match self { + Self::Encoded { value_cache, .. } => value_cache.get().is_some(), + _ => true, + } + } + + /// Checks if there is a Cid stored in link. + pub fn has_reference(&self) -> bool { + match self { + Self::Decoded { + reference_cache, .. + } => reference_cache.get().is_some(), + _ => true, + } + } +} + +impl From for Referenceable { + fn from(value: V) -> Self { + Self::Decoded { + value, + reference_cache: OnceCell::new(), + } + } +} + +impl Clone for Referenceable +where + V: Clone, + R: Clone, +{ + fn clone(&self) -> Self { + match self { + Self::Encoded { + reference, + value_cache, + } => Self::Encoded { + reference: reference.clone(), + value_cache: OnceCell::new_with(value_cache.get().cloned()), + }, + Self::Decoded { + value, + reference_cache, + } => Self::Decoded { + value: value.clone(), + reference_cache: OnceCell::new_with(reference_cache.get().cloned()), + }, + } + } +} + +impl PartialEq for Referenceable +where + R: PartialEq, + V: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + match (self, other) { + ( + Self::Encoded { reference, .. }, + Self::Encoded { + reference: reference2, + .. + }, + ) => reference == reference2, + (Self::Decoded { value, .. }, Self::Decoded { value: value2, .. }) => value == value2, + (Self::Encoded { reference, .. }, Self::Decoded { value: value2, .. }) => { + if let Some(reference2) = other.get_reference() { + reference == reference2 + } else if let Some(value) = self.get_value() { + value == value2 + } else { + false + } + } + ( + Self::Decoded { value, .. }, + Self::Encoded { + reference: reference2, + .. 
+ }, + ) => { + if let Some(reference) = self.get_reference() { + reference == reference2 + } else if let Some(value2) = other.get_value() { + value == value2 + } else { + false + } + } + } + } +} diff --git a/crates/fs/common/utils.rs b/crates/fs/common/utils.rs new file mode 100644 index 00000000..a4f4b1fe --- /dev/null +++ b/crates/fs/common/utils.rs @@ -0,0 +1,92 @@ +use anyhow::Result; +use serde::de::Visitor; +use std::fmt; + +use crate::{error, FsError}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +pub(crate) struct ByteArrayVisitor; + +#[cfg(test)] +pub(crate) struct TestRng(); + +#[cfg(test)] +pub(crate) struct ProptestRng(proptest::test_runner::TestRng); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl<'de, const N: usize> Visitor<'de> for ByteArrayVisitor { + type Value = [u8; N]; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a byte array of length {}", N) + } + + fn visit_bytes(self, v: &[u8]) -> Result + where + E: serde::de::Error, + { + let bytes: [u8; N] = v.try_into().map_err(E::custom)?; + Ok(bytes) + } +} + +#[cfg(test)] +impl crate::private::Rng for TestRng { + fn random_bytes(&mut self) -> [u8; N] { + use rand::RngCore; + let mut bytes = [0u8; N]; + rand::thread_rng().fill_bytes(&mut bytes); + bytes + } +} + +#[cfg(test)] +impl ProptestRng { + pub(crate) fn from_seed(algorithm: proptest::test_runner::RngAlgorithm, seed: &[u8]) -> Self { + Self(proptest::test_runner::TestRng::from_seed(algorithm, seed)) + } +} + +#[cfg(test)] +impl crate::private::Rng for ProptestRng { + fn random_bytes(&mut self) -> [u8; N] { + use rand::RngCore; + let mut bytes = [0u8; N]; + self.0.fill_bytes(&mut bytes); + bytes + } +} + +//-------------------------------------------------------------------------------------------------- +// Functions +//-------------------------------------------------------------------------------------------------- + +pub fn split_last(path_segments: &[String]) -> Result<(&[String], &String)> { + match path_segments.split_last() { + Some((last, rest)) => Ok((rest, last)), + None => error(FsError::InvalidPath), + } +} + +//-------------------------------------------------------------------------------------------------- +// Tests +//-------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod public_directory_tests { + use super::*; + + #[test] + fn split_last_splits_path_segments_into_tail_and_the_rest() { + let path_segments = ["a".into(), "b".into(), "c".into()]; + let (rest, last) = split_last(&path_segments).unwrap(); + assert_eq!(rest, &["a", "b"]); + assert_eq!(last, &"c"); + } +} diff --git a/crates/fs/private/directory.rs b/crates/fs/private/directory.rs new file mode 100644 index 00000000..ef4ed7de --- /dev/null +++ b/crates/fs/private/directory.rs @@ -0,0 +1,773 @@ +use std::{collections::BTreeMap, rc::Rc}; + +use anyhow::{bail, Result}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use super::{ + namefilter::Namefilter, INumber, PrivateFile, PrivateForest, PrivateNode, PrivateNodeHeader, + PrivateRef, Rng, +}; + +use crate::{ + error, utils, BlockStore, 
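`Referenceable` is the generalisation that `Link` is now an alias for: a cached two-way mapping between a value and its reference, backed by a `ReferenceableStore`. A small round-trip sketch, assuming the `wnfs::{Link, MemoryBlockStore}` re-exports used by the doc tests in this PR:

```rust
use wnfs::{Link, MemoryBlockStore};

async fn round_trip() -> anyhow::Result<()> {
    let store = &mut MemoryBlockStore::default();

    // Start from a value; resolving the reference stores it and caches the Cid.
    let link = Link::from(String::from("hello"));
    let cid = *link.resolve_cid(store).await?;

    // Start from the Cid; resolving the value fetches it and caches the result.
    let from_cid = Link::<String>::from_cid(cid);
    assert_eq!(from_cid.resolve_value(store).await?, "hello");

    Ok(())
}
```

Either side is computed at most once per `Referenceable` thanks to the `OnceCell` caches, and `PartialEq` deliberately compares only whichever sides are already available, since sealing or resolving would require an async call.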
FsError, HashOutput, Id, Metadata, PathNodes, PathNodesResult, + UnixFsNodeKind, HASH_BYTE_SIZE, +}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +pub type PrivatePathNodes = PathNodes; +pub type PrivatePathNodesResult = PathNodesResult; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PrivateDirectoryContent { + pub(crate) metadata: Metadata, + pub(crate) entries: BTreeMap, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PrivateDirectory { + pub(crate) header: PrivateNodeHeader, + pub(crate) content: PrivateDirectoryContent, +} + +/// The result of an operation applied to a directory. +#[derive(Debug, Clone, PartialEq)] +pub struct PrivateOpResult { + /// The root directory. + pub root_dir: Rc, + /// The hamt forest. + pub hamt: Rc, + /// Implementation dependent but it usually the last leaf node operated on. + pub result: T, +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PrivateDirectory { + /// Creates a new directory with provided details. + pub fn new( + parent_bare_name: Namefilter, + inumber: INumber, + ratchet_seed: HashOutput, + time: DateTime, + ) -> Self { + Self { + header: PrivateNodeHeader::new(parent_bare_name, inumber, ratchet_seed), + content: PrivateDirectoryContent { + metadata: Metadata::new(time, UnixFsNodeKind::Dir), + entries: BTreeMap::new(), + }, + } + } + + /// Generates two random set of bytes. + pub fn generate_double_random(rng: &mut R) -> (HashOutput, HashOutput) { + const _DOUBLE_SIZE: usize = HASH_BYTE_SIZE * 2; + let [first, second] = unsafe { + std::mem::transmute::<[u8; _DOUBLE_SIZE], [[u8; HASH_BYTE_SIZE]; 2]>( + rng.random_bytes::<_DOUBLE_SIZE>(), + ) + }; + (first, second) + } + + /// Advances the ratchet. + pub(crate) fn advance_ratchet(&mut self) { + self.header.advance_ratchet(); + } + + /// Creates a new `PathNodes` that is not based on an existing file tree. + pub(crate) fn create_path_nodes( + path_segments: &[String], + time: DateTime, + parent_bare_name: Namefilter, + rng: &mut R, + ) -> PrivatePathNodes { + let mut working_parent_bare_name = parent_bare_name; + let (mut inumber, mut ratchet_seed) = Self::generate_double_random(rng); + + let path: Vec<(Rc, String)> = path_segments + .iter() + .map(|segment| { + // Create new private directory. + let directory = Rc::new(PrivateDirectory::new( + std::mem::take(&mut working_parent_bare_name), + inumber, + ratchet_seed, + time, + )); + + // Update seeds and the working parent bare name. + (inumber, ratchet_seed) = Self::generate_double_random(rng); + working_parent_bare_name = directory.header.bare_name.clone(); + + (directory, segment.clone()) + }) + .collect(); + + PrivatePathNodes { + path, + tail: Rc::new(PrivateDirectory::new( + std::mem::take(&mut working_parent_bare_name), + inumber, + ratchet_seed, + time, + )), + } + } + + /// Uses specified path segments and their existence in the file tree to generate `PathNodes`. + /// + /// Supports cases where the entire path does not exist. 
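`generate_double_random` above draws one 2 * HASH_BYTE_SIZE block and transmutes it into the `(inumber, ratchet_seed)` pair. Functionally this is just two independent 32-byte draws; a safe sketch of the same idea (equivalent in distribution, though not byte-for-byte identical for a seeded RNG; the `wnfs::private::Rng` path is assumed from this PR's module layout):

```rust
use wnfs::{private::Rng, HashOutput, HASH_BYTE_SIZE};

/// Safe sketch: two separate draws instead of one transmuted 64-byte draw.
pub fn generate_double_random<R: Rng>(rng: &mut R) -> (HashOutput, HashOutput) {
    (
        rng.random_bytes::<HASH_BYTE_SIZE>(),
        rng.random_bytes::<HASH_BYTE_SIZE>(),
    )
}
```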
+ pub(crate) async fn get_path_nodes( + self: Rc, + path_segments: &[String], + hamt: &PrivateForest, + store: &B, + ) -> Result { + use PathNodesResult::*; + let mut working_node = self; + let mut path_nodes = Vec::with_capacity(path_segments.len()); + + for path_segment in path_segments { + match working_node.lookup_node(path_segment, hamt, store).await? { + Some(PrivateNode::Dir(ref directory)) => { + path_nodes.push((Rc::clone(&working_node), path_segment.clone())); + working_node = Rc::clone(directory); + } + Some(_) => { + let path_nodes = PrivatePathNodes { + path: path_nodes, + tail: Rc::clone(&working_node), + }; + + return Ok(NotADirectory(path_nodes, path_segment.clone())); + } + None => { + let path_nodes = PrivatePathNodes { + path: path_nodes, + tail: Rc::clone(&working_node), + }; + + return Ok(MissingLink(path_nodes, path_segment.clone())); + } + } + } + + Ok(Complete(PrivatePathNodes { + path: path_nodes, + tail: Rc::clone(&working_node), + })) + } + + /// Uses specified path segments to generate `PathNodes`. Creates missing directories as needed. + pub(crate) async fn get_or_create_path_nodes( + self: Rc, + path_segments: &[String], + time: DateTime, + hamt: &PrivateForest, + store: &mut B, + rng: &mut R, + ) -> Result { + use PathNodesResult::*; + match self.get_path_nodes(path_segments, hamt, store).await? { + Complete(path_nodes) => Ok(path_nodes), + NotADirectory(_, _) => error(FsError::InvalidPath), + MissingLink(path_so_far, missing_link) => { + // Get remaining missing path segments. + let missing_path = path_segments.split_at(path_so_far.path.len() + 1).1; + + // Get tail bare name from `path_so_far`. + let parent_bare_name = path_so_far.tail.header.bare_name.clone(); + + // Create missing directories. + let missing_path_nodes = + Self::create_path_nodes(missing_path, time, parent_bare_name, rng); + + Ok(PrivatePathNodes { + path: [ + path_so_far.path, + vec![(path_so_far.tail, missing_link)], + missing_path_nodes.path, + ] + .concat(), + tail: missing_path_nodes.tail, + }) + } + } + } + + /// Fix up `PathNodes` so that parents refer to the newly updated children. + async fn fix_up_path_nodes( + path_nodes: PrivatePathNodes, + hamt: Rc, + store: &mut B, + rng: &mut R, + ) -> Result<(Rc, Rc)> { + let mut working_hamt = Rc::clone(&hamt); + let mut working_child_dir = { + let mut tmp = (*path_nodes.tail).clone(); + tmp.advance_ratchet(); + Rc::new(tmp) + }; + + for (parent_dir, segment) in path_nodes.path.iter().rev() { + let mut parent_dir = (**parent_dir).clone(); + parent_dir.advance_ratchet(); + let child_private_ref = working_child_dir.header.get_private_ref()?; + + parent_dir + .content + .entries + .insert(segment.clone(), child_private_ref.clone()); + + let parent_dir = Rc::new(parent_dir); + + working_hamt = working_hamt + .set( + working_child_dir.header.get_saturated_name(), + &child_private_ref, + &PrivateNode::Dir(Rc::clone(&working_child_dir)), + store, + rng, + ) + .await?; + + working_child_dir = parent_dir; + } + + working_hamt = working_hamt + .set( + working_child_dir.header.get_saturated_name(), + &working_child_dir.header.get_private_ref()?, + &PrivateNode::Dir(Rc::clone(&working_child_dir)), + store, + rng, + ) + .await?; + + Ok((working_child_dir, working_hamt)) + } + + /// Follows a path and fetches the node at the end of the path. 
+ pub async fn get_node( + self: Rc, + path_segments: &[String], + hamt: Rc, + store: &B, + ) -> Result>> { + use PathNodesResult::*; + let root_dir = Rc::clone(&self); + + Ok(match path_segments.split_last() { + Some((path_segment, parent_path)) => { + match self.get_path_nodes(parent_path, &hamt, store).await? { + Complete(parent_path_nodes) => { + let result = parent_path_nodes + .tail + .lookup_node(path_segment, &hamt, store) + .await?; + + PrivateOpResult { + root_dir, + hamt, + result, + } + } + MissingLink(_, _) => bail!(FsError::NotFound), + NotADirectory(_, _) => bail!(FsError::NotFound), + } + } + None => PrivateOpResult { + root_dir, + hamt, + result: Some(PrivateNode::Dir(self)), + }, + }) + } + + /// Reads specified file content from the directory. + pub async fn read( + self: Rc, + path_segments: &[String], + hamt: Rc, + store: &B, + ) -> Result>> { + let root_dir = Rc::clone(&self); + let (path, filename) = utils::split_last(path_segments)?; + + match self.get_path_nodes(path, &hamt, store).await? { + PathNodesResult::Complete(node_path) => { + match node_path.tail.lookup_node(filename, &hamt, store).await? { + Some(PrivateNode::File(file)) => Ok(PrivateOpResult { + root_dir, + hamt, + result: file.content.content.clone(), + }), + Some(PrivateNode::Dir(_)) => error(FsError::NotAFile), + None => error(FsError::NotFound), + } + } + _ => error(FsError::NotFound), + } + } + + /// Writes a file to the directory. + pub async fn write( + self: Rc, + path_segments: &[String], + time: DateTime, + content: Vec, + hamt: Rc, + store: &mut B, + rng: &mut R, + ) -> Result> { + let (directory_path, filename) = utils::split_last(path_segments)?; + + // This will create directories if they don't exist yet + let mut directory_path_nodes = self + .get_or_create_path_nodes(directory_path, time, &hamt, store, rng) + .await?; + + let mut directory = (*directory_path_nodes.tail).clone(); + + // Modify the file if it already exists, otherwise create a new file with expected content + let file = match directory.lookup_node(filename, &hamt, store).await? { + Some(PrivateNode::File(file_before)) => { + let mut file = (*file_before).clone(); + file.content.content = content; + file.content.metadata = Metadata::new(time, UnixFsNodeKind::File); + file + } + Some(PrivateNode::Dir(_)) => bail!(FsError::DirectoryAlreadyExists), + None => { + let (inumber, ratchet_seed) = Self::generate_double_random(rng); + PrivateFile::new( + directory.header.bare_name.clone(), + inumber, + ratchet_seed, + time, + content, + ) + } + }; + + let child_private_ref = file.header.get_private_ref()?; + let hamt = hamt + .set( + file.header.get_saturated_name(), + &child_private_ref, + &PrivateNode::File(Rc::new(file)), + store, + rng, + ) + .await?; + + // Insert the file into its parent directory + directory + .content + .entries + .insert(filename.to_string(), child_private_ref); + + directory_path_nodes.tail = Rc::new(directory); + + let (root_dir, hamt) = + Self::fix_up_path_nodes(directory_path_nodes, hamt, store, rng).await?; + + // Fix up the file path + Ok(PrivateOpResult { + root_dir, + hamt, + result: (), + }) + } + + /// Looks up a node by its path name in the current directory. + pub async fn lookup_node<'a, B: BlockStore>( + &self, + path_segment: &str, + hamt: &PrivateForest, + store: &B, + ) -> Result> { + Ok(match self.content.entries.get(path_segment) { + Some(private_ref) => hamt.get(private_ref, store).await?, + None => None, + }) + } + + /// Creates a new directory at the specified path. 
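None of these operations mutate in place: `write`, `mkdir`, and `rm` clone and ratchet the affected nodes, then return a fresh `root_dir` and `hamt` inside `PrivateOpResult`, which the caller threads into the next call (the tests further down follow exactly this pattern, and the old snapshots stay valid). A condensed sketch of that flow; module paths are assumed from this PR's `pub use` layout:

```rust
use std::rc::Rc;

use chrono::Utc;
use wnfs::{
    private::{PrivateDirectory, PrivateForest, PrivateOpResult, Rng},
    BlockStore,
};

/// Writes a file, then reads it back, threading the new root and forest through.
async fn write_then_read<B: BlockStore, R: Rng>(
    root_dir: Rc<PrivateDirectory>,
    hamt: Rc<PrivateForest>,
    store: &mut B,
    rng: &mut R,
) -> anyhow::Result<Vec<u8>> {
    // `write` consumes the old root_dir/hamt and hands back new ones.
    let PrivateOpResult { root_dir, hamt, .. } = root_dir
        .write(
            &["notes.txt".into()],
            Utc::now(),
            b"hi".to_vec(),
            hamt,
            store,
            rng,
        )
        .await?;

    // `read` must use the returned root_dir and hamt, not the originals.
    let PrivateOpResult { result, .. } = root_dir
        .read(&["notes.txt".into()], hamt, store)
        .await?;

    Ok(result)
}
```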
+ pub async fn mkdir( + self: Rc, + path_segments: &[String], + time: DateTime, + hamt: Rc, + store: &mut B, + rng: &mut R, + ) -> Result> { + let path_nodes = self + .get_or_create_path_nodes(path_segments, time, &hamt, store, rng) + .await?; + + let (root_dir, hamt) = Self::fix_up_path_nodes(path_nodes, hamt, store, rng).await?; + + Ok(PrivateOpResult { + root_dir, + hamt, + result: (), + }) + } + + /// Returns names and metadata of directory's immediate children. + pub async fn ls( + self: Rc, + path_segments: &[String], + hamt: Rc, + store: &B, + ) -> Result>> { + let root_dir = Rc::clone(&self); + match self.get_path_nodes(path_segments, &hamt, store).await? { + PathNodesResult::Complete(path_nodes) => { + let mut result = vec![]; + for (name, private_ref) in path_nodes.tail.content.entries.iter() { + match hamt.get(private_ref, store).await? { + Some(PrivateNode::File(file)) => { + result.push((name.clone(), file.content.metadata.clone())); + } + Some(PrivateNode::Dir(dir)) => { + result.push((name.clone(), dir.content.metadata.clone())); + } + _ => bail!(FsError::NotFound), + } + } + Ok(PrivateOpResult { + root_dir, + hamt, + result, + }) + } + _ => bail!(FsError::NotFound), + } + } + + /// Removes a file or directory from the directory. + pub async fn rm( + self: Rc, + path_segments: &[String], + hamt: Rc, + store: &mut B, + rng: &mut R, + ) -> Result> { + let (directory_path, node_name) = utils::split_last(path_segments)?; + + let mut directory_path_nodes = + match self.get_path_nodes(directory_path, &hamt, store).await? { + PrivatePathNodesResult::Complete(node_path) => node_path, + _ => bail!(FsError::NotFound), + }; + + let mut directory = (*directory_path_nodes.tail).clone(); + + // Remove the entry from its parent directory + let removed_node = match directory.content.entries.remove(node_name) { + Some(ref private_ref) => hamt.get(private_ref, store).await?.unwrap(), + None => bail!(FsError::NotFound), + }; + + directory_path_nodes.tail = Rc::new(directory); + + let (root_dir, hamt) = + Self::fix_up_path_nodes(directory_path_nodes, hamt, store, rng).await?; + + Ok(PrivateOpResult { + root_dir, + hamt, + result: removed_node, + }) + } +} + +impl Id for PrivateDirectory { + fn get_id(&self) -> String { + format!("{:p}", &self.header) + } +} + +//-------------------------------------------------------------------------------------------------- +// Tests +//-------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod private_directory_tests { + use super::*; + use crate::{utils::TestRng, MemoryBlockStore, HASH_BYTE_SIZE}; + use test_log::test; + + #[test(async_std::test)] + async fn look_up_can_fetch_file_added_to_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let content = b"Hello, World!".to_vec(); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .write( + &["text.txt".into()], + Utc::now(), + content.clone(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { result, .. 
} = root_dir + .read(&["text.txt".into()], hamt, store) + .await + .unwrap(); + + assert_eq!(result, content); + } + + #[test(async_std::test)] + async fn look_up_cannot_fetch_file_not_added_to_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let node = root_dir.lookup_node("Unknown", &hamt, store).await.unwrap(); + + assert!(node.is_none()); + } + + #[test(async_std::test)] + async fn mkdir_can_create_new_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .mkdir( + &["tamedun".into(), "pictures".into()], + Utc::now(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { result, .. } = root_dir + .get_node(&["tamedun".into(), "pictures".into()], hamt, store) + .await + .unwrap(); + + assert!(result.is_some()); + } + + #[test(async_std::test)] + async fn ls_can_list_children_under_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .mkdir( + &["tamedun".into(), "pictures".into()], + Utc::now(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .write( + &["tamedun".into(), "pictures".into(), "puppy.jpg".into()], + Utc::now(), + b"puppy".to_vec(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .mkdir( + &["tamedun".into(), "pictures".into(), "cats".into()], + Utc::now(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { result, .. } = root_dir + .ls(&["tamedun".into(), "pictures".into()], hamt, store) + .await + .unwrap(); + + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, String::from("cats")); + assert_eq!(result[1].0, String::from("puppy.jpg")); + assert_eq!(result[0].1.unix_fs.kind, UnixFsNodeKind::Dir); + assert_eq!(result[1].1.unix_fs.kind, UnixFsNodeKind::File); + } + + #[test(async_std::test)] + async fn rm_can_remove_children_from_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .mkdir( + &["tamedun".into(), "pictures".into()], + Utc::now(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .write( + &["tamedun".into(), "pictures".into(), "puppy.jpg".into()], + Utc::now(), + b"puppy".to_vec(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .mkdir( + &["tamedun".into(), "pictures".into(), "cats".into()], + Utc::now(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { root_dir, hamt, .. 
} = root_dir + .rm(&["tamedun".into(), "pictures".into()], hamt, store, rng) + .await + .unwrap(); + + let result = root_dir + .rm(&["tamedun".into(), "pictures".into()], hamt, store, rng) + .await; + + assert!(result.is_err()); + } + + #[async_std::test] + async fn read_can_fetch_userland_of_file_added_to_directory() { + let rng = &mut TestRng(); + let root_dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::(), + rng.random_bytes::(), + Utc::now(), + )); + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + + let PrivateOpResult { root_dir, hamt, .. } = root_dir + .write( + &["text.txt".into()], + Utc::now(), + b"text".to_vec(), + hamt, + store, + rng, + ) + .await + .unwrap(); + + let PrivateOpResult { result, .. } = root_dir + .read(&["text.txt".into()], hamt, store) + .await + .unwrap(); + + assert_eq!(result, b"text".to_vec()); + } + + #[async_std::test] + async fn path_nodes_can_generates_new_path_nodes() { + let store = &mut MemoryBlockStore::default(); + let hamt = Rc::new(PrivateForest::new()); + let rng = &mut TestRng(); + + let path_nodes = PrivateDirectory::create_path_nodes( + &["Documents".into(), "Apps".into()], + Utc::now(), + Namefilter::default(), + rng, + ); + + let (root_dir, hamt) = + PrivateDirectory::fix_up_path_nodes(path_nodes.clone(), hamt, store, rng) + .await + .unwrap(); + + let result = root_dir + .get_path_nodes(&["Documents".into(), "Apps".into()], &hamt, store) + .await + .unwrap(); + + match result { + PathNodesResult::MissingLink(_, segment) => panic!("MissingLink {segment}"), + PathNodesResult::NotADirectory(_, segment) => panic!("NotADirectory {segment}"), + PathNodesResult::Complete(path_nodes_2) => { + assert_eq!(path_nodes.path.len(), path_nodes_2.path.len()); + assert_eq!(path_nodes.path[0].1, path_nodes_2.path[0].1); + assert_eq!(path_nodes.path[1].1, path_nodes_2.path[1].1); + } + } + } +} diff --git a/crates/fs/private/file.rs b/crates/fs/private/file.rs new file mode 100644 index 00000000..3f59cb74 --- /dev/null +++ b/crates/fs/private/file.rs @@ -0,0 +1,50 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::{HashOutput, Id, Metadata, UnixFsNodeKind}; + +use super::{namefilter::Namefilter, INumber, PrivateNodeHeader}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PrivateFileContent { + pub(crate) metadata: Metadata, + pub(crate) content: Vec, // Inlined file content. // TODO(appcypher): Support linked file content. 
+} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PrivateFile { + pub(crate) header: PrivateNodeHeader, + pub(crate) content: PrivateFileContent, +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PrivateFile { + pub fn new( + parent_bare_name: Namefilter, + inumber: INumber, + ratchet_seed: HashOutput, + time: DateTime, + content: Vec, + ) -> Self { + Self { + header: PrivateNodeHeader::new(parent_bare_name, inumber, ratchet_seed), + content: PrivateFileContent { + metadata: Metadata::new(time, UnixFsNodeKind::File), + content, + }, + } + } +} + +impl Id for PrivateFile { + fn get_id(&self) -> String { + format!("{:p}", &self.header) + } +} diff --git a/crates/fs/private/forest.rs b/crates/fs/private/forest.rs new file mode 100644 index 00000000..d3dcc096 --- /dev/null +++ b/crates/fs/private/forest.rs @@ -0,0 +1,176 @@ +use std::rc::Rc; + +use anyhow::Result; +use libipld::Cid; +use log::debug; + +use crate::{BlockStore, HashOutput}; + +use super::{hamt::Hamt, namefilter::Namefilter, Key, PrivateNode, PrivateRef, NONCE_SIZE}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +pub type EncryptedPrivateNode = (Option>, Cid); // TODO(appcypher): Change to PrivateLink. +pub type PrivateForest = Hamt; + +pub trait Rng { + fn random_bytes(&mut self) -> [u8; N]; +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PrivateForest { + /// Encrypts supplied bytes with a random nonce and AES key. + pub(crate) fn encrypt(key: &Key, data: &[u8], rng: &mut R) -> Result> { + key.encrypt(&rng.random_bytes::(), data) + } + + /// Sets a new value at the given key. + #[inline] + pub async fn set( + self: Rc, + saturated_name: Namefilter, + private_ref: &PrivateRef, + value: &PrivateNode, + store: &mut B, + rng: &mut R, + ) -> Result> { + debug!("hamt store set: PrivateRef: {:?}", private_ref); + + // Serialize header and content section as dag-cbor bytes. + let (header_bytes, content_bytes) = value.serialize_as_cbor()?; + + // Encrypt header and content section. + let enc_content_bytes = Self::encrypt(&private_ref.content_key.0, &content_bytes, rng)?; + let enc_header_bytes = Some(Self::encrypt( + &private_ref.ratchet_key.0, + &header_bytes, + rng, + )?); + + // Store content section in blockstore and get Cid. + let content_cid = store + .put_block(enc_content_bytes, libipld::IpldCodec::Raw) + .await?; + + // Store header and Cid in root node. + self.set_encrypted(saturated_name, (enc_header_bytes, content_cid), store) + .await + } + + /// Gets the value at the given key. + #[inline] + pub async fn get( + &self, + private_ref: &PrivateRef, + store: &B, + ) -> Result> { + debug!("hamt store get: PrivateRef: {:?}", private_ref); + + // Fetch encrypted header and Cid from root node. + let (enc_header_bytes, content_cid) = match self + .get_encrypted(&private_ref.saturated_name_hash, store) + .await? + { + Some(value) => value, + None => return Ok(None), + }; + + // Fetch encrypted content section from blockstore. 
+ let enc_content_bytes = store.get_block(content_cid).await?; + + // Decrypt header and content section. + let content_bytes = private_ref.content_key.0.decrypt(&enc_content_bytes)?; + let header_bytes = match enc_header_bytes { + Some(enc_header_bytes) => Some(private_ref.ratchet_key.0.decrypt(enc_header_bytes)?), + _ => None, + }; + + // Deserialize header and content section. + Ok(Some(PrivateNode::deserialize_from_cbor( + &header_bytes, + &content_bytes, + )?)) + } + + /// Sets a new encrypted value at the given key. + #[inline] + pub async fn set_encrypted( + self: Rc, + name: Namefilter, + value: EncryptedPrivateNode, + store: &mut B, + ) -> Result> { + let mut cloned = (*self).clone(); + cloned.root = self.root.set(name, value, store).await?; + Ok(Rc::new(cloned)) + } + + /// Gets the encrypted value at the given key. + #[inline] + pub async fn get_encrypted<'b, B: BlockStore>( + &'b self, + name_hash: &HashOutput, + store: &B, + ) -> Result> { + self.root.get_by_hash(name_hash, store).await + } + + /// Removes the encrypted value at the given key. + pub async fn remove_encrypted( + self: Rc, + name_hash: &HashOutput, + store: &mut B, + ) -> Result<(Rc, Option)> { + let mut cloned = (*self).clone(); + let (root, value) = self.root.remove_by_hash(name_hash, store).await?; + cloned.root = root; + Ok((Rc::new(cloned), value)) + } +} + +// //-------------------------------------------------------------------------------------------------- +// // Tests +// //-------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod hamt_store_tests { + use std::rc::Rc; + use test_log::test; + + use chrono::Utc; + + use super::*; + use crate::{private::PrivateDirectory, utils::TestRng, MemoryBlockStore}; + + #[test(async_std::test)] + async fn inserted_items_can_be_fetched() { + let store = &mut MemoryBlockStore::new(); + let hamt = Rc::new(PrivateForest::new()); + let rng = &mut TestRng(); + + let dir = Rc::new(PrivateDirectory::new( + Namefilter::default(), + rng.random_bytes::<32>(), + rng.random_bytes::<32>(), + Utc::now(), + )); + + let private_ref = dir.header.get_private_ref().unwrap(); + let saturated_name = dir.header.get_saturated_name(); + let private_node = PrivateNode::Dir(dir.clone()); + + let hamt = hamt + .set(saturated_name, &private_ref, &private_node, store, rng) + .await + .unwrap(); + + let retrieved = hamt.get(&private_ref, store).await.unwrap().unwrap(); + + assert_eq!(retrieved, private_node); + } +} diff --git a/crates/fs/private/hamt/constants.rs b/crates/fs/private/hamt/constants.rs index e0323355..2ed3df8a 100644 --- a/crates/fs/private/hamt/constants.rs +++ b/crates/fs/private/hamt/constants.rs @@ -3,4 +3,4 @@ use semver::Version; pub const HAMT_BITMASK_BIT_SIZE: usize = 16; pub const HAMT_BITMASK_BYTE_SIZE: usize = HAMT_BITMASK_BIT_SIZE / 8; pub const HAMT_VALUES_BUCKET_SIZE: usize = 3; -pub const HAMT_VERSION: Version = Version::new(1, 0, 0); +pub const HAMT_VERSION: Version = Version::new(0, 1, 0); diff --git a/crates/fs/private/hamt/error.rs b/crates/fs/private/hamt/error.rs index 40af2ca1..63693835 100644 --- a/crates/fs/private/hamt/error.rs +++ b/crates/fs/private/hamt/error.rs @@ -4,8 +4,10 @@ use thiserror::Error; pub enum HamtError { #[error("Hashnibbles cursor has exceeded HashOutput length")] CursorOutOfBounds, + #[error("Cannot canonicalize a link pointer to a node with zero pointer")] NonCanonicalizablePointer, + #[error("Values pointer expected")] ValuesPointerExpected, } diff --git 
a/crates/fs/private/hamt/hamt.rs b/crates/fs/private/hamt/hamt.rs index d8c9e97e..dbcd87fe 100644 --- a/crates/fs/private/hamt/hamt.rs +++ b/crates/fs/private/hamt/hamt.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, rc::Rc, str::FromStr}; use anyhow::Result; use async_trait::async_trait; -use libipld::{serde as ipld_serde, Ipld}; +use libipld::{serde as ipld_serde, Cid, Ipld}; use semver::Version; use serde::{ de::{DeserializeOwned, Error as DeError}, @@ -10,7 +10,7 @@ use serde::{ Deserialize, Deserializer, Serialize, Serializer, }; -use crate::{AsyncSerialize, BlockStore}; +use crate::{AsyncSerialize, ReferenceableStore}; use super::{Node, HAMT_VERSION}; @@ -20,12 +20,12 @@ use super::{Node, HAMT_VERSION}; #[derive(Debug, Clone, PartialEq)] pub struct Hamt { - root: Rc>, - version: Version, - structure: Structure, + pub root: Rc>, + pub version: Version, + pub structure: Structure, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Structure { HAMT, } @@ -35,6 +35,15 @@ pub enum Structure { //-------------------------------------------------------------------------------------------------- impl Hamt { + /// Creates a new empty Hamt. + pub fn new() -> Self { + Self { + root: Rc::new(Node::default()), + version: HAMT_VERSION, + structure: Structure::HAMT, + } + } + /// Creates a new `Hamt` with the given root node. pub fn with_root(root: Rc>) -> Self { Self { @@ -45,7 +54,10 @@ impl Hamt { } /// Converts a HAMT to an IPLD object. - pub async fn to_ipld(&self, store: &mut B) -> Result + pub async fn to_ipld + ?Sized>( + &self, + store: &mut RS, + ) -> Result where K: Serialize, V: Serialize, @@ -64,11 +76,13 @@ where K: Serialize, V: Serialize, { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { + type StoreRef = Cid; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { self.to_ipld(store) .await .map_err(SerError::custom)? 
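// [Editor's sketch, not part of the original patch] With the `Hamt::new` constructor added
// above (a matching `Default` impl follows later in this file), an empty map can be built
// and used through its now-public `root` node. The key/value types below are arbitrary and
// the hasher parameter is assumed to default as in the existing tests.
//
//     let store = &mut MemoryBlockStore::default();
//     let hamt = Hamt::<String, u64>::new();
//     let root = hamt.root.set("answer".into(), 42, store).await.unwrap();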
@@ -147,13 +161,18 @@ impl TryFrom<&str> for Structure { } } +impl Default for Hamt { + fn default() -> Self { + Self::new() + } +} + //-------------------------------------------------------------------------------------------------- // Tests //-------------------------------------------------------------------------------------------------- #[cfg(test)] mod hamt_tests { - use super::*; use crate::{dagcbor, MemoryBlockStore}; diff --git a/crates/fs/private/hamt/hash.rs b/crates/fs/private/hamt/hash.rs index cd57a6d4..b3998198 100644 --- a/crates/fs/private/hamt/hash.rs +++ b/crates/fs/private/hamt/hash.rs @@ -23,7 +23,7 @@ pub trait Hasher { #[derive(Debug, Clone)] pub struct HashNibbles<'a> { - digest: &'a HashOutput, + pub digest: &'a HashOutput, cursor: usize, } diff --git a/crates/fs/private/hamt/mod.rs b/crates/fs/private/hamt/mod.rs index 210e13f9..1ede6d8a 100644 --- a/crates/fs/private/hamt/mod.rs +++ b/crates/fs/private/hamt/mod.rs @@ -11,5 +11,6 @@ mod pointer; pub(crate) use constants::*; pub use hamt::*; +pub use hash::*; pub use node::*; pub use pointer::*; diff --git a/crates/fs/private/hamt/node.rs b/crates/fs/private/hamt/node.rs index 978145e5..02b731ff 100644 --- a/crates/fs/private/hamt/node.rs +++ b/crates/fs/private/hamt/node.rs @@ -1,12 +1,15 @@ use std::{fmt::Debug, marker::PhantomData, rc::Rc}; -use crate::{private::HAMT_VALUES_BUCKET_SIZE, AsyncSerialize, BlockStore, Link}; +use crate::{ + private::hamt::HAMT_VALUES_BUCKET_SIZE, AsyncSerialize, BlockStore, HashOutput, Link, + ReferenceableStore, +}; use anyhow::{bail, Result}; use async_recursion::async_recursion; use async_trait::async_trait; use bitvec::array::BitArray; -use libipld::{serde as ipld_serde, Ipld}; +use libipld::{serde as ipld_serde, Cid, Ipld}; use log::debug; use serde::{ de::{Deserialize, DeserializeOwned}, @@ -43,20 +46,20 @@ where impl Node where - K: DeserializeOwned + Serialize + AsRef<[u8]> + Clone + Eq + PartialOrd + Debug, + K: DeserializeOwned + Serialize + Clone + Debug + AsRef<[u8]>, V: DeserializeOwned + Serialize + Clone + Debug, H: Hasher + Clone + Debug, { /// Sets a new value at the given key. pub async fn set( - self: Rc, + self: &Rc, key: K, value: V, store: &mut B, ) -> Result> { let hash = &H::hash(&key); debug!("set: hash = {:02x?}", hash); - self.modify_value(&mut HashNibbles::new(hash), key, value, store) + self.set_value(&mut HashNibbles::new(hash), key, value, store) .await } @@ -69,20 +72,42 @@ where let hash = &H::hash(key); debug!("get: hash = {:02x?}", hash); Ok(self - .get_value(&mut HashNibbles::new(hash), key, store) + .get_value(&mut HashNibbles::new(hash), store) .await? .map(|pair| &pair.value)) } /// Removes the value at the given key. pub async fn remove<'a, B: BlockStore>( - self: Rc, + self: &Rc, key: &K, store: &B, - ) -> Result<(Rc, Option)> { + ) -> Result<(Rc, Option>)> { let hash = &H::hash(key); debug!("remove: hash = {:02x?}", hash); - self.remove_value(&mut HashNibbles::new(hash), key, store) + self.remove_value(&mut HashNibbles::new(hash), store).await + } + + /// Gets the value at the key matching the provided hash. + pub async fn get_by_hash<'a, B: BlockStore>( + self: &'a Rc, + hash: &HashOutput, + store: &B, + ) -> Result> { + debug!("get_by_hash: hash = {:02x?}", hash); + Ok(self + .get_value(&mut HashNibbles::new(hash), store) + .await? + .map(|pair| &pair.value)) + } + + /// Removes the value at the key matching the provided hash. 
+ pub async fn remove_by_hash<'a, B: BlockStore>( + self: &Rc, + hash: &HashOutput, + store: &B, + ) -> Result<(Rc, Option)> { + self.remove_value(&mut HashNibbles::new(hash), store) .await .map(|(node, pair)| (node, pair.map(|pair| pair.value))) } @@ -107,7 +132,7 @@ where } #[async_recursion(?Send)] - pub(super) async fn modify_value<'a, 'b, B: BlockStore>( + pub async fn set_value<'a, 'b, B: BlockStore>( self: &'a Rc, hashnibbles: &'b mut HashNibbles, key: K, @@ -118,7 +143,7 @@ where let value_index = self.get_value_index(bit_index); debug!( - "modify_value: bit_index = {}, value_index = {}", + "set_value: bit_index = {}, value_index = {}", bit_index, value_index ); @@ -139,7 +164,10 @@ where let mut node = (**self).clone(); let pointers: Pointer<_, _, H> = { let mut values = (*values).clone(); - if let Some(i) = values.iter().position(|p| p.key == key) { + if let Some(i) = values + .iter() + .position(|p| &H::hash(&p.key) == hashnibbles.digest) + { // If the key is already present, update the value. values[i] = Pair::new(key, value); Pointer::Values(values) @@ -149,7 +177,7 @@ where // Insert in order of key. let index = values .iter() - .position(|p| p.key > key) + .position(|p| &H::hash(&p.key) > hashnibbles.digest) .unwrap_or(values.len()); values.insert(index, Pair::new(key, value)); Pointer::Values(values) @@ -162,9 +190,8 @@ where { let hash = &H::hash(&key); let hashnibbles = &mut HashNibbles::with_cursor(hash, cursor); - sub_node = sub_node - .modify_value(hashnibbles, key, value, store) - .await?; + sub_node = + sub_node.set_value(hashnibbles, key, value, store).await?; } Pointer::Link(Link::from(sub_node)) } @@ -176,7 +203,7 @@ where } Pointer::Link(link) => { let child = Rc::clone(link.resolve_value(store).await?); - let child = child.modify_value(hashnibbles, key, value, store).await?; + let child = child.set_value(hashnibbles, key, value, store).await?; let mut node = (**self).clone(); node.pointers[value_index] = Pointer::Link(Link::from(child)); Rc::new(node) @@ -185,10 +212,9 @@ where } #[async_recursion(?Send)] - pub(super) async fn get_value<'a, 'b, B: BlockStore>( + pub async fn get_value<'a, 'b, B: BlockStore>( self: &'a Rc, hashnibbles: &'b mut HashNibbles, - key: &K, store: &B, ) -> Result>> { let bit_index = hashnibbles.try_next()?; @@ -200,19 +226,22 @@ where let value_index = self.get_value_index(bit_index); match &self.pointers[value_index] { - Pointer::Values(values) => Ok(values.iter().find(|kv| key.eq(&kv.key))), + Pointer::Values(values) => Ok({ + values + .iter() + .find(|p| &H::hash(&p.key) == hashnibbles.digest) + }), Pointer::Link(link) => { let child = link.resolve_value(store).await?; - child.get_value(hashnibbles, key, store).await + child.get_value(hashnibbles, store).await } } } #[async_recursion(?Send)] - pub(super) async fn remove_value<'a, 'b, B: BlockStore>( + pub async fn remove_value<'a, 'b, B: BlockStore>( self: &'a Rc, hashnibbles: &'b mut HashNibbles, - key: &K, store: &B, ) -> Result<(Rc, Option>)> { let bit_index = hashnibbles.try_next()?; @@ -227,6 +256,10 @@ where Pointer::Values(values) => { let mut node = (**self).clone(); let value = if values.len() == 1 { + // If the key doesn't match, return without removing. + if &H::hash(&values[0].key) != hashnibbles.digest { + return Ok((Rc::clone(self), None)); + } // If there is only one value, we can remove the entire pointer. node.bitmask.set(bit_index, false); match node.pointers.remove(value_index) { @@ -236,18 +269,21 @@ where } else { // Otherwise, remove just the value. 
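// [Editor's note, not part of the original patch] From this hunk onward, bucket entries are
// matched by comparing `H::hash(&p.key)` against the full digest held by `hashnibbles`
// instead of comparing keys directly; this is what allows `get_by_hash` and
// `remove_by_hash` above to operate without a key value.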
let mut values = (*values).clone(); - values.iter().position(|p| p.key == *key).map(|i| { - let value = values.remove(i); - node.pointers[value_index] = Pointer::Values(values); - value - }) + values + .iter() + .position(|p| &H::hash(&p.key) == hashnibbles.digest) + .map(|i| { + let value = values.remove(i); + node.pointers[value_index] = Pointer::Values(values); + value + }) }; (Rc::new(node), value) } Pointer::Link(link) => { let child = Rc::clone(link.resolve_value(store).await?); - let (child, value) = child.remove_value(hashnibbles, key, store).await?; + let (child, value) = child.remove_value(hashnibbles, store).await?; let mut node = (**self).clone(); if value.is_some() { @@ -288,7 +324,10 @@ impl Node { } /// Converts a Node to an IPLD object. - pub async fn to_ipld(&self, store: &mut B) -> Result + pub async fn to_ipld + ?Sized>( + &self, + store: &mut RS, + ) -> Result where K: Serialize, V: Serialize, @@ -323,11 +362,13 @@ where V: Serialize, H: Hasher, { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { + type StoreRef = Cid; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { self.to_ipld(store) .await .map_err(SerError::custom)? @@ -370,9 +411,9 @@ where //-------------------------------------------------------------------------------------------------- #[cfg(test)] -mod hamt_node_tests { +mod hamt_node_unit_tests { use super::*; - use crate::{dagcbor, HashOutput, MemoryBlockStore}; + use crate::{HashOutput, MemoryBlockStore}; use lazy_static::lazy_static; use test_log::test; @@ -396,12 +437,7 @@ mod hamt_node_tests { impl Hasher for MockHasher { fn hash>(key: &K) -> HashOutput { let s = std::str::from_utf8(key.as_ref()).unwrap(); - HASH_KV_PAIRS - .iter() - .find(|(_, v)| s == *v) - .unwrap() - .0 - .clone() + HASH_KV_PAIRS.iter().find(|(_, v)| s == *v).unwrap().0 } } @@ -412,20 +448,17 @@ mod hamt_node_tests { // Insert 4 values to trigger the creation of a linked node. let mut working_node = Rc::new(Node::::default()); for (digest, kv) in HASH_KV_PAIRS.iter() { - let hashnibbles = &mut HashNibbles::new(&digest); + let hashnibbles = &mut HashNibbles::new(digest); working_node = working_node - .modify_value(hashnibbles, kv.to_string(), kv.to_string(), store) + .set_value(hashnibbles, kv.to_string(), kv.to_string(), store) .await .unwrap(); } // Get the values. for (digest, kv) in HASH_KV_PAIRS.iter() { - let hashnibbles = &mut HashNibbles::new(&digest); - let value = working_node - .get_value(hashnibbles, &kv.to_string(), store) - .await - .unwrap(); + let hashnibbles = &mut HashNibbles::new(digest); + let value = working_node.get_value(hashnibbles, store).await.unwrap(); assert_eq!(value, Some(&Pair::new(kv.to_string(), kv.to_string()))); } @@ -438,9 +471,9 @@ mod hamt_node_tests { // Insert 4 values to trigger the creation of a linked node. let mut working_node = Rc::new(Node::::default()); for (digest, kv) in HASH_KV_PAIRS.iter() { - let hashnibbles = &mut HashNibbles::new(&digest); + let hashnibbles = &mut HashNibbles::new(digest); working_node = working_node - .modify_value(hashnibbles, kv.to_string(), kv.to_string(), store) + .set_value(hashnibbles, kv.to_string(), kv.to_string(), store) .await .unwrap(); } @@ -450,7 +483,7 @@ mod hamt_node_tests { // Remove the third value. 
let third_hashnibbles = &mut HashNibbles::new(&HASH_KV_PAIRS[2].0); working_node = working_node - .remove_value(third_hashnibbles, &"third".to_string(), store) + .remove_value(third_hashnibbles, store) .await .unwrap() .0; @@ -464,7 +497,7 @@ mod hamt_node_tests { } let value = working_node - .get_value(third_hashnibbles, &"third".to_string(), store) + .get_value(third_hashnibbles, store) .await .unwrap(); @@ -472,16 +505,16 @@ mod hamt_node_tests { } #[test(async_std::test)] - async fn modify_value_splits_when_bucket_threshold_reached() { + async fn set_value_splits_when_bucket_threshold_reached() { let store = &mut MemoryBlockStore::default(); // Insert 3 values into the HAMT. let mut working_node = Rc::new(Node::::default()); for (idx, (digest, kv)) in HASH_KV_PAIRS.iter().take(3).enumerate() { let kv = kv.to_string(); - let hashnibbles = &mut HashNibbles::new(&digest); + let hashnibbles = &mut HashNibbles::new(digest); working_node = working_node - .modify_value(hashnibbles, kv.clone(), kv.clone(), store) + .set_value(hashnibbles, kv.clone(), kv.clone(), store) .await .unwrap(); @@ -497,7 +530,7 @@ mod hamt_node_tests { // Inserting the fourth value should introduce a link indirection. working_node = working_node - .modify_value( + .set_value( &mut HashNibbles::new(&HASH_KV_PAIRS[3].0), "fourth".to_string(), "fourth".to_string(), @@ -544,7 +577,7 @@ mod hamt_node_tests { let hashnibbles = &mut HashNibbles::new(&bytes); working_node = working_node - .modify_value( + .set_value( hashnibbles, expected_idx.to_string(), expected_idx.to_string(), @@ -573,22 +606,332 @@ mod hamt_node_tests { .await .unwrap(); - let value = node.get(&"pill".into(), &mut store).await.unwrap().unwrap(); + let value = node.get(&"pill".into(), &store).await.unwrap().unwrap(); assert_eq!(value, &(10, 0.315)); } #[test(async_std::test)] - async fn node_can_encode_decode_as_cbor() { + async fn node_is_same_with_irrelevant_remove() { + // These two keys' hashes have the same first nibble (7) + let insert_key: String = "GL59 Tg4phDb bv".into(); + let remove_key: String = "hK i3b4V4152EPOdA".into(); + let store = &mut MemoryBlockStore::default(); - let node: Rc> = Rc::new(Node::default()); + let mut node0: Rc> = Rc::new(Node::default()); + + node0 = node0.set(insert_key.clone(), 0, store).await.unwrap(); + (node0, _) = node0.remove(&remove_key, store).await.unwrap(); + + assert_eq!(node0.count_values().unwrap(), 1); + } + + #[test(async_std::test)] + async fn node_history_independence_regression() { + let store = &mut MemoryBlockStore::default(); + + let mut node1: Rc> = Rc::new(Node::default()); + let mut node2: Rc> = Rc::new(Node::default()); + + node1 = node1.set("key 17".into(), 508, store).await.unwrap(); + node1 = node1.set("key 81".into(), 971, store).await.unwrap(); + node1 = node1.set("key 997".into(), 365, store).await.unwrap(); + (node1, _) = node1.remove(&"key 17".into(), store).await.unwrap(); + node1 = node1.set("key 68".into(), 870, store).await.unwrap(); + node1 = node1.set("key 304".into(), 331, store).await.unwrap(); + + node2 = node2.set("key 81".into(), 971, store).await.unwrap(); + node2 = node2.set("key 17".into(), 508, store).await.unwrap(); + node2 = node2.set("key 997".into(), 365, store).await.unwrap(); + node2 = node2.set("key 304".into(), 331, store).await.unwrap(); + node2 = node2.set("key 68".into(), 870, store).await.unwrap(); + (node2, _) = node2.remove(&"key 17".into(), store).await.unwrap(); + + let cid1 = store.put_async_serializable(&node1).await.unwrap(); + let cid2 = 
store.put_async_serializable(&node2).await.unwrap(); + + assert_eq!(cid1, cid2); + } +} + +#[cfg(test)] +mod hamt_node_prop_tests { + + use std::collections::HashMap; + use std::hash::Hash; + + use proptest::collection::*; + use proptest::prelude::*; + use proptest::strategy::Shuffleable; + use test_strategy::proptest; + + use crate::dagcbor; + use crate::MemoryBlockStore; + + use super::*; + + #[derive(Debug, Clone)] + enum Operation { + Insert(K, V), + Remove(K), + } + + impl Operation { + pub fn can_be_swapped_with(&self, other: &Operation) -> bool + where + K: PartialEq, + V: PartialEq, + { + match (self, other) { + (Operation::Insert(key_a, val_a), Operation::Insert(key_b, val_b)) => { + // We can't swap if the keys are the same and values different. + // Because in those cases operation order matters. + // E.g. insert "a" 10, insert "a" 11 != insert "a" 11, insert "a" 10 + // But insert "a" 10, insert "b" 11 == insert "b" 11, insert "a" 10 + // Or insert "a" 10, insert "a" 10 == insert "a" 10, insert "a" 10 ('swapped') + key_a != key_b || val_a == val_b + } + (Operation::Insert(key_i, _), Operation::Remove(key_r)) => { + // We can only swap if these two operations are unrelated. + // Otherwise order matters. + // E.g. insert "a" 10, remove "a" != remove "a", insert "a" 10 + key_i != key_r + } + (Operation::Remove(key_r), Operation::Insert(key_i, _)) => { + // same as above + key_i != key_r + } + (Operation::Remove(_), Operation::Remove(_)) => { + // Removes can always be swapped + true + } + } + } + } + + #[derive(Debug, Clone)] + struct Operations(Vec>); + + impl Shuffleable for Operations { + fn shuffle_len(&self) -> usize { + self.0.len() + } + + /// Swaps the values if that wouldn't change the semantics. + /// Otherwise it's a no-op. + fn shuffle_swap(&mut self, a: usize, b: usize) { + use std::cmp; + if a == b { + return; + } + let min = cmp::min(a, b); + let max = cmp::max(a, b); + let left = &self.0[min]; + let right = &self.0[max]; + + for i in min..=max { + let neighbor = &self.0[i]; + if !left.can_be_swapped_with(neighbor) { + return; + } + if !right.can_be_swapped_with(neighbor) { + return; + } + } + + // The reasoning for why this works now, is following: + // Let's look at an example. We checked that we can do all of these swaps: + // a x y z b + // x a y z b + // x y a z b + // x y z a b + // x y z b a + // x y b z a + // x b y z a + // b x y z a + // Observe how a moves to the right + // and b moves to the left. + // The end result is the same as + // just swapping a and b. + // With all calls to `can_be_swapped_with` above + // we've made sure that this operation is now safe. 
+ + self.0.swap(a, b); + } + } + + async fn node_from_operations( + operations: Operations, + store: &mut B, + ) -> Result>> + where + K: DeserializeOwned + Serialize + Clone + Debug + AsRef<[u8]>, + V: DeserializeOwned + Serialize + Clone + Debug, + { + let mut node: Rc> = Rc::new(Node::default()); + for op in operations.0 { + match op { + Operation::Insert(key, value) => { + node = node.set(key.clone(), value, store).await?; + } + Operation::Remove(key) => { + (node, _) = node.remove(&key, store).await?; + } + }; + } + + Ok(node) + } + + fn hash_map_from_operations( + operations: Operations, + ) -> HashMap { + let mut map = HashMap::default(); + for op in operations.0 { + match op { + Operation::Insert(key, value) => { + map.insert(key, value); + } + Operation::Remove(key) => { + map.remove(&key); + } + } + } + map + } + + fn small_key() -> impl Strategy { + (0..1000).prop_map(|i| format!("key {i}")) + } + + fn operation( + key: impl Strategy, + value: impl Strategy, + ) -> impl Strategy> { + (any::(), key, value).prop_map(|(is_insert, key, value)| { + if is_insert { + Operation::Insert(key, value) + } else { + Operation::Remove(key) + } + }) + } + + fn operations( + key: impl Strategy, + value: impl Strategy, + size: impl Into, + ) -> impl Strategy> { + vec(operation(key, value), size).prop_map(|vec| Operations(vec)) + } + + fn operations_and_shuffled( + key: impl Strategy, + value: impl Strategy, + size: impl Into, + ) -> impl Strategy, Operations)> { + operations(key, value, size) + .prop_flat_map(|operations| (Just(operations.clone()), Just(operations).prop_shuffle())) + } + + #[proptest(cases = 50)] + fn test_insert_idempotence( + #[strategy(operations(small_key(), 0u64..1000, 0..100))] operations: Operations< + String, + u64, + >, + #[strategy(small_key())] key: String, + #[strategy(0..1000u64)] value: u64, + ) { + async_std::task::block_on(async move { + let store = &mut MemoryBlockStore::default(); + let node = node_from_operations(operations, store).await.unwrap(); + + node.set(key.clone(), value, store).await.unwrap(); + let cid1 = store.put_async_serializable(&node).await.unwrap(); + + node.set(key, value, store).await.unwrap(); + let cid2 = store.put_async_serializable(&node).await.unwrap(); + + assert_eq!(cid1, cid2); + }) + } + + #[proptest(cases = 50)] + fn test_remove_idempotence( + #[strategy(operations(small_key(), 0u64..1000, 0..100))] operations: Operations< + String, + u64, + >, + #[strategy(small_key())] key: String, + ) { + async_std::task::block_on(async move { + let store = &mut MemoryBlockStore::default(); + let node = node_from_operations(operations, store).await.unwrap(); + + node.remove(&key, store).await.unwrap(); + let cid1 = store.put_async_serializable(&node).await.unwrap(); + + node.remove(&key, store).await.unwrap(); + let cid2 = store.put_async_serializable(&node).await.unwrap(); + + assert_eq!(cid1, cid2); + }) + } + + #[proptest(cases = 100)] + fn node_can_encode_decode_as_cbor( + #[strategy(operations(small_key(), 0u64..1000, 0..1000))] operations: Operations< + String, + u64, + >, + ) { + async_std::task::block_on(async move { + let store = &mut MemoryBlockStore::default(); + let node = node_from_operations(operations, store).await.unwrap(); + + let encoded_node = dagcbor::async_encode(&node, store).await.unwrap(); + let decoded_node = dagcbor::decode::>(encoded_node.as_ref()).unwrap(); + + assert_eq!(*node, decoded_node); + }) + } + + #[proptest(cases = 1000, max_shrink_iters = 10_000)] + fn node_operations_are_history_independent( + 
#[strategy(operations_and_shuffled(small_key(), 0u64..1000, 0..100))] pair: ( + Operations, + Operations, + ), + ) { + async_std::task::block_on(async move { + let (original, shuffled) = pair; + + let store = &mut MemoryBlockStore::default(); + + let node1 = node_from_operations(original, store).await.unwrap(); + let node2 = node_from_operations(shuffled, store).await.unwrap(); + + let cid1 = store.put_async_serializable(&node1).await.unwrap(); + let cid2 = store.put_async_serializable(&node2).await.unwrap(); + + assert_eq!(cid1, cid2); + }) + } - let node = node.set("James".into(), 4500, store).await.unwrap(); - let node = node.set("Peter".into(), 2000, store).await.unwrap(); + // This is sort of a "control group" for making sure that operations_and_shuffled is correct. + #[proptest(cases = 200, max_shrink_iters = 10_000)] + fn hash_map_is_history_independent( + #[strategy(operations_and_shuffled(small_key(), 0u64..1000, 0..1000))] pair: ( + Operations, + Operations, + ), + ) { + let (original, shuffled) = pair; - let encoded_node = dagcbor::async_encode(&node, store).await.unwrap(); - let decoded_node = dagcbor::decode::>(encoded_node.as_ref()).unwrap(); + let map1 = hash_map_from_operations(original); + let map2 = hash_map_from_operations(shuffled); - assert_eq!(*node, decoded_node); + assert_eq!(map1, map2); } } diff --git a/crates/fs/private/hamt/pointer.rs b/crates/fs/private/hamt/pointer.rs index 672d4f32..57ec55f5 100644 --- a/crates/fs/private/hamt/pointer.rs +++ b/crates/fs/private/hamt/pointer.rs @@ -2,7 +2,7 @@ use std::rc::Rc; use anyhow::Result; use async_trait::async_trait; -use libipld::{serde as ipld_serde, Ipld}; +use libipld::{serde as ipld_serde, Cid, Ipld}; use serde::{ de::{DeserializeOwned, Error as DeError}, @@ -10,7 +10,7 @@ use serde::{ Deserialize, Deserializer, Serialize, Serializer, }; -use crate::{error, AsyncSerialize, BlockStore, Link}; +use crate::{error, AsyncSerialize, BlockStore, Link, ReferenceableStore}; use super::{error::HamtError, hash::Hasher, Node, HAMT_VALUES_BUCKET_SIZE}; @@ -48,7 +48,7 @@ impl Pointer { /// Converts a Link pointer to a canonical form to ensure consistent tree representation after deletes. pub async fn canonicalize(self, store: &B) -> Result> where - K: DeserializeOwned + PartialOrd + Clone, + K: DeserializeOwned + Clone + AsRef<[u8]>, V: DeserializeOwned + Clone, H: Clone, { @@ -72,7 +72,14 @@ impl Pointer { .flatten() .collect::>(); - values.sort_unstable_by(|a, b| a.key.partial_cmp(&b.key).unwrap()); + // Bail if it's more values that we can fit into a bucket + if values.len() > HAMT_VALUES_BUCKET_SIZE { + return Ok(Some(Pointer::Link(Link::from(node)))); + } + + values.sort_unstable_by(|a, b| { + H::hash(&a.key).partial_cmp(&H::hash(&b.key)).unwrap() + }); Ok(Some(Pointer::Values(values))) } @@ -84,7 +91,10 @@ impl Pointer { } /// Converts a Pointer to an IPLD object. 
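// [Editor's note, not part of the original patch] The `canonicalize` change above now keeps
// a link pointer intact whenever the merged child values would exceed
// `HAMT_VALUES_BUCKET_SIZE`, and sorts collapsed values by `H::hash(&key)` so bucket order
// matches the hash-based insertion order used in `set_value`.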
- pub async fn to_ipld(&self, store: &mut B) -> Result + pub async fn to_ipld + ?Sized>( + &self, + store: &mut RS, + ) -> Result where K: Serialize, V: Serialize, @@ -102,11 +112,13 @@ where K: Serialize, V: Serialize, { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { + type StoreRef = Cid; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { match self { Pointer::Values(vals) => vals.serialize(serializer), Pointer::Link(link) => link diff --git a/crates/fs/private/key.rs b/crates/fs/private/key.rs new file mode 100644 index 00000000..ade6b89a --- /dev/null +++ b/crates/fs/private/key.rs @@ -0,0 +1,108 @@ +use std::fmt::Debug; + +use aes_gcm::aead::{Aead, NewAead}; +use aes_gcm::{Aes256Gcm, Key as AesKey, Nonce}; +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +use crate::FsError; + +use super::Rng; + +//-------------------------------------------------------------------------------------------------- +// Contants +//-------------------------------------------------------------------------------------------------- + +pub(crate) const NONCE_SIZE: usize = 12; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct Key(pub(super) [u8; 32]); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl Key { + /// Creates a new key from [u8; 32]. + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Encrypts the given plaintext using the key. + pub fn encrypt(&self, nonce_bytes: &[u8; NONCE_SIZE], data: &[u8]) -> Result> { + let nonce = Nonce::from_slice(nonce_bytes); + + let cipher_text = Aes256Gcm::new(AesKey::from_slice(&self.0)) + .encrypt(nonce, data) + .map_err(|e| FsError::UnableToEncrypt(format!("{}", e)))?; + + Ok([nonce_bytes.to_vec(), cipher_text].concat()) + } + + /// Decrypts the given ciphertext using the key. + pub fn decrypt(&self, cipher_text: &[u8]) -> Result> { + let (nonce_bytes, data) = cipher_text.split_at(NONCE_SIZE); + + Ok(Aes256Gcm::new(AesKey::from_slice(&self.0)) + .decrypt(Nonce::from_slice(nonce_bytes), data) + .map_err(|e| FsError::UnableToDecrypt(format!("{}", e)))?) + } + + /// Generates a nonce that can be used to encrypt data. + #[inline] + pub fn generate_nonce(rng: &mut R) -> [u8; NONCE_SIZE] + where + R: Rng, + { + rng.random_bytes::() + } + + /// Grabs the bytes of the key. + pub fn bytes(self) -> [u8; 32] { + self.0 + } + + /// Gets the bytes of the key. 
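// [Editor's sketch, not part of the original patch] A round trip with the `Key` API added
// above: `encrypt` prepends the 12-byte nonce to the ciphertext and `decrypt` splits it
// back off, so only the combined buffer needs to be stored. `rng` stands for any
// implementor of the `Rng` trait from this patch.
//
//     let key = Key::new(rng.random_bytes::<32>());
//     let cipher = key.encrypt(&Key::generate_nonce(rng), b"secret").unwrap();
//     let plain = key.decrypt(&cipher).unwrap();
//     assert_eq!(plain, b"secret".to_vec());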
+ pub fn as_bytes(&self) -> &[u8] { + &self.0 + } +} + +impl Debug for Key { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Key(0x{:02X?})", &self.0[..5]) + } +} + +//-------------------------------------------------------------------------------------------------- +// Tests +//-------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod key_prop_tests { + use crate::utils::ProptestRng; + + use super::*; + use proptest::prelude::any; + use proptest::test_runner::RngAlgorithm; + use test_strategy::proptest; + + #[proptest(cases = 50)] + fn key_can_encrypt_and_decrypt_data( + #[strategy(any::>())] data: Vec, + #[strategy(any::<[u8; 32]>())] rng_seed: [u8; 32], + key_bytes: [u8; 32], + ) { + let key = Key::new(key_bytes); + let rng = &mut ProptestRng::from_seed(RngAlgorithm::ChaCha, &rng_seed); + + let encrypted = key.encrypt(&Key::generate_nonce(rng), &data).unwrap(); + let decrypted = key.decrypt(&encrypted).unwrap(); + + assert_eq!(decrypted, data); + } +} diff --git a/crates/fs/private/link.rs b/crates/fs/private/link.rs new file mode 100644 index 00000000..d5fd00ce --- /dev/null +++ b/crates/fs/private/link.rs @@ -0,0 +1,141 @@ +use anyhow::Result; +use async_once_cell::OnceCell; +use libipld::Cid; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{BlockStore, Link}; + +use super::{Key, Rng}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// A data structure that represents a link in the IPLD graph. Basically it is a "link" to some content addressable value of `T`. +/// +/// It supports representing the "link" with a Cid or the deserialized value itself. +/// +/// Link needs a `BlockStore` to be able to resolve Cids to corresponding values of `T` and vice versa. +#[derive(Debug, Clone)] +pub struct PrivateLink(Link); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PrivateLink { + /// Creates a new `Referenceable` that starts out as a value of `R`. + pub fn from_reference(cid: Cid) -> Self { + Self(Link::Encoded { + reference: cid, + value_cache: OnceCell::new(), + }) + } + + /// Gets an owned value from type. It attempts to it get from the store if it is not present in type. + pub async fn get_owned_value<'a, B, R>(self, store: &B, key: &Key) -> Result + where + B: BlockStore, + R: Rng, + V: DeserializeOwned, + { + match self.0 { + Link::Encoded { + ref reference, + value_cache, + } => match value_cache.into_inner() { + Some(cached) => Ok(cached), + None => store.get_private_deserializable(reference, key).await, + }, + Link::Decoded { value, .. } => Ok(value), + } + } + + /// Gets the value stored in type. + /// + /// NOTE: This does not attempt to get it from the store if it does not exist. + #[inline] + pub fn get_value(&self) -> Option<&V> { + self.0.get_value() + } + + /// Gets the cid data stored in type. + /// + /// NOTE: This does not attempt to get it from the store if it does not exist. + #[inline] + pub fn get_cid(&self) -> Option<&Cid> { + self.0.get_cid() + } + + /// Gets the value stored in link. It attempts to get it from the store if it is not present in link. 
+ pub async fn resolve_value<'a, 'b, B, R>(&'a self, store: &B, key: &Key) -> Result<&'a V> + where + B: BlockStore, + R: Rng, + V: DeserializeOwned, + { + match &self.0 { + Link::Encoded { + reference, + value_cache, + } => { + value_cache + .get_or_try_init(async { + store.get_private_deserializable(reference, key).await + }) + .await + } + Link::Decoded { value, .. } => Ok(value), + } + } + + /// Gets the cid data stored in type. It attempts to get it from the store if it is not present in type. + pub async fn resolve_cid<'a, 'b, B, R>( + &'a self, + store: &mut B, + key: &Key, + rng: &mut R, + ) -> Result<&'a Cid> + where + B: BlockStore, + R: Rng, + V: Serialize, + { + match &self.0 { + Link::Encoded { reference, .. } => Ok(reference), + Link::Decoded { + value, + reference_cache, + } => { + reference_cache + .get_or_try_init(async { + store + .put_private_serializable::<_, R>(value, key, rng) + .await + }) + .await + } + } + } + + /// Checks if there is a value stored in link. + #[inline] + pub fn has_value(&self) -> bool { + self.0.has_value() + } + + /// Checks if there is a Cid stored in link. + #[inline] + pub fn has_cid(&self) -> bool { + self.0.has_cid() + } +} + +impl From for PrivateLink { + fn from(value: V) -> Self { + Self(Link::Decoded { + value, + reference_cache: OnceCell::new(), + }) + } +} diff --git a/crates/fs/private/mod.rs b/crates/fs/private/mod.rs index 0c0db162..f96800da 100644 --- a/crates/fs/private/mod.rs +++ b/crates/fs/private/mod.rs @@ -1,5 +1,15 @@ -mod hamt; -mod namefilter; +mod directory; +mod file; +mod forest; +pub mod hamt; +mod key; +mod link; +pub mod namefilter; +mod node; -pub use hamt::*; -pub use namefilter::*; +pub use directory::*; +pub use file::*; +pub use forest::*; +pub use key::*; +pub use link::*; +pub use node::*; diff --git a/crates/fs/private/namefilter/bloomfilter.rs b/crates/fs/private/namefilter/bloomfilter.rs index 4c4cfe46..772c3f64 100644 --- a/crates/fs/private/namefilter/bloomfilter.rs +++ b/crates/fs/private/namefilter/bloomfilter.rs @@ -1,8 +1,12 @@ use std::ops::Index; +use anyhow::anyhow; use bitvec::prelude::BitArray; +use serde::{Deserialize, Serialize}; use xxhash_rust::xxh3; +use crate::utils::ByteArrayVisitor; + //------------------------------------------------------------------------------ // Type Definitions //------------------------------------------------------------------------------ @@ -12,7 +16,7 @@ use xxhash_rust::xxh3; /// `N` is the size of the bloom filter in bytes. /// /// `K` is the number of bits to be set with each add operation. 
-#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd)] pub struct BloomFilter { pub(super) bits: BitArray<[u8; N]>, } @@ -106,6 +110,20 @@ impl BloomFilter { } } +impl TryFrom> for BloomFilter { + type Error = anyhow::Error; + + fn try_from(bytes: Vec) -> Result { + let bits = BitArray::<[u8; N]>::new(bytes.try_into().map_err(|e: Vec| { + anyhow!( + "Cannot convert vector to BloomFilter: Expected length {}", + e.len() + ) + })?); + Ok(Self { bits }) + } +} + impl Index for BloomFilter { type Output = bool; @@ -121,12 +139,33 @@ impl Default for BloomFilter { } } +impl Serialize for BloomFilter { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_bytes(self.bits.as_raw_slice()) + } +} + +impl<'de, const N: usize, const K: usize> Deserialize<'de> for BloomFilter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Ok(BloomFilter:: { + bits: BitArray::<[u8; N]>::new(deserializer.deserialize_bytes(ByteArrayVisitor::)?), + }) + } +} + //------------------------------------------------------------------------------ // Tests //------------------------------------------------------------------------------ #[cfg(test)] mod bloomfilter_tests { + use libipld::serde as ipld_serde; use rand::{thread_rng, Rng}; use super::*; @@ -163,4 +202,18 @@ mod bloomfilter_tests { assert_eq!(indices.len(), count); } } + + #[test] + fn serialized_bloom_filter_can_be_deserialized_correctly() { + let mut bloom = BloomFilter::<256, 30>::new(); + let items: Vec = vec!["first".into(), "second".into(), "third".into()]; + items.iter().for_each(|item| { + bloom.add(item); + }); + + let ipld = ipld_serde::to_ipld(&bloom).unwrap(); + let deserialized: BloomFilter<256, 30> = ipld_serde::from_ipld(ipld).unwrap(); + + assert_eq!(deserialized, bloom); + } } diff --git a/crates/fs/private/node.rs b/crates/fs/private/node.rs new file mode 100644 index 00000000..17a29893 --- /dev/null +++ b/crates/fs/private/node.rs @@ -0,0 +1,284 @@ +use std::{io::Cursor, rc::Rc}; + +use anyhow::{anyhow, bail, Result}; +use chrono::{DateTime, Utc}; +use libipld::{ + cbor::DagCborCodec, + codec::{Decode, Encode}, + serde as ipld_serde, Ipld, +}; +use serde::{Deserialize, Serialize, Serializer}; +use sha3::Sha3_256; +use skip_ratchet::Ratchet; + +use crate::{FsError, HashOutput, Id, Metadata}; + +use super::{ + hamt::Hasher, namefilter::Namefilter, Key, PrivateDirectory, PrivateDirectoryContent, + PrivateFile, PrivateFileContent, +}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +pub type INumber = HashOutput; + +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +pub struct ContentKey(pub Key); + +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +pub struct RatchetKey(pub Key); + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PrivateNodeHeader { + pub(crate) bare_name: Namefilter, + pub(crate) ratchet: Ratchet, + pub(crate) inumber: INumber, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PrivateNode { + File(Rc), + Dir(Rc), +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PrivateRef { + /// Sha3-256 hash of saturated namefilter. + pub(crate) saturated_name_hash: HashOutput, + /// Sha3-256 hash of the ratchet key. 
+ pub(crate) content_key: ContentKey, + /// Skip-ratchet-derived key. + pub(crate) ratchet_key: RatchetKey, +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl PrivateNodeHeader { + /// Creates a new PrivateNodeHeader. + pub fn new(parent_bare_name: Namefilter, inumber: INumber, ratchet_seed: HashOutput) -> Self { + Self { + bare_name: { + let mut namefilter = parent_bare_name; + namefilter.add(&inumber); + namefilter + }, + ratchet: Ratchet::zero(ratchet_seed), + inumber, + } + } + + /// Advances the ratchet. + pub fn advance_ratchet(&mut self) { + self.ratchet.inc(); + } + + /// Gets the private ref of the current header. + pub fn get_private_ref(&self) -> Result { + let ratchet_key = Key::new(self.ratchet.derive_key()); + let saturated_name_hash = Sha3_256::hash(&self.get_saturated_name_with_key(&ratchet_key)); + + Ok(PrivateRef { + saturated_name_hash, + content_key: ContentKey(Key::new(Sha3_256::hash(&ratchet_key.as_bytes()))), + ratchet_key: RatchetKey(ratchet_key), + }) + } + + /// Gets the saturated namefilter for this node using the provided ratchet key. + pub fn get_saturated_name_with_key(&self, ratchet_key: &Key) -> Namefilter { + let mut name = self.bare_name.clone(); + name.add(&ratchet_key.as_bytes()); + name.saturate(); + name + } + + /// Gets the saturated namefilter for this node. + #[inline] + pub fn get_saturated_name(&self) -> Namefilter { + let ratchet_key = Key::new(self.ratchet.derive_key()); + self.get_saturated_name_with_key(&ratchet_key) + } +} + +impl PrivateNode { + /// Creates node with updated modified time. + pub fn update_mtime(&self, time: DateTime) -> Self { + match self { + Self::File(file) => { + let mut file = (**file).clone(); + file.content.metadata.unix_fs.modified = time.timestamp(); + Self::File(Rc::new(file)) + } + Self::Dir(dir) => { + let mut dir = (**dir).clone(); + dir.content.metadata.unix_fs.modified = time.timestamp(); + Self::Dir(Rc::new(dir)) + } + } + } + + /// Gets the header of the node. + pub fn header(&self) -> &PrivateNodeHeader { + match self { + Self::File(file) => &file.header, + Self::Dir(dir) => &dir.header, + } + } + + /// Serializes the node header section. + pub fn serialize_header(&self, serializer: S) -> Result { + match self { + PrivateNode::File(file) => file.header.serialize(serializer), + PrivateNode::Dir(dir) => dir.header.serialize(serializer), + } + } + + /// Serializes the node content section. + pub fn serialize_content(&self, serializer: S) -> Result { + match self { + PrivateNode::File(file) => file.content.serialize(serializer), + PrivateNode::Dir(dir) => dir.content.serialize(serializer), + } + } + + /// Serializes the node into dag-cbor bytes. + pub fn serialize_as_cbor(&self) -> Result<(Vec, Vec)> { + let header_ipld = self.serialize_header(ipld_serde::Serializer)?; + let content_ipld = self.serialize_content(ipld_serde::Serializer)?; + + let mut header_bytes = Vec::new(); + let mut content_bytes = Vec::new(); + + header_ipld.encode(DagCborCodec, &mut header_bytes)?; + content_ipld.encode(DagCborCodec, &mut content_bytes)?; + + Ok((header_bytes, content_bytes)) + } + + /// Deserializes the node from dag-cbor bytes. 
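// [Editor's note, not part of the original patch] Summarizing the derivation in
// `get_private_ref` above: the ratchet key is derived from the current ratchet state, the
// content key is the Sha3-256 hash of that ratchet key, and the saturated name hash is the
// Sha3-256 hash of the bare namefilter after the ratchet key is added and the filter is
// saturated.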
+ pub fn deserialize_from_cbor( + header_bytes: &Option>, + content_bytes: &[u8], + ) -> Result { + let header_ipld = match header_bytes { + Some(bytes) => Ipld::decode(DagCborCodec, &mut Cursor::new(bytes))?, + None => bail!(FsError::MissingHeader), + }; + + let header: PrivateNodeHeader = ipld_serde::from_ipld(header_ipld)?; + + let content_ipld = Ipld::decode(DagCborCodec, &mut Cursor::new(content_bytes))?; + + Self::deserialize_content(content_ipld, header) + } + + /// Deserializes the node content from IPLD form. + pub fn deserialize_content(content_ipld: Ipld, header: PrivateNodeHeader) -> Result { + match content_ipld { + Ipld::Map(map) => { + let metadata_ipld = map + .get("metadata") + .ok_or("Missing metadata field") + .map_err(|e| anyhow!(e))?; + + let metadata: Metadata = + metadata_ipld.try_into().map_err(|e: String| anyhow!(e))?; + + Ok(if metadata.is_file() { + let content = PrivateFileContent::deserialize(Ipld::Map(map))?; + PrivateNode::from(PrivateFile { header, content }) + } else { + let content = PrivateDirectoryContent::deserialize(Ipld::Map(map))?; + PrivateNode::from(PrivateDirectory { header, content }) + }) + } + other => bail!(FsError::InvalidDeserialization(format!( + "Expected `Ipld::Map` got {:?}", + other + ))), + } + } + + /// Casts a node to a directory. + /// + /// # Panics + /// + /// Panics if the node is not a directory. + pub fn as_dir(&self) -> Result> { + Ok(match self { + Self::Dir(dir) => Rc::clone(dir), + _ => bail!(FsError::NotADirectory), + }) + } + + /// Casts a node to a file. + /// + /// # Panics + /// + /// Panics if the node is not a file. + pub fn as_file(&self) -> Result> { + Ok(match self { + Self::File(file) => Rc::clone(file), + _ => bail!(FsError::NotAFile), + }) + } + + /// Returns true if underlying node is a directory. 
+ pub fn is_dir(&self) -> bool { + matches!(self, Self::Dir(_)) + } +} + +impl Id for PrivateNode { + fn get_id(&self) -> String { + match self { + Self::File(file) => file.get_id(), + Self::Dir(dir) => dir.get_id(), + } + } +} + +impl From for PrivateNode { + fn from(file: PrivateFile) -> Self { + Self::File(Rc::new(file)) + } +} + +impl From for PrivateNode { + fn from(dir: PrivateDirectory) -> Self { + Self::Dir(Rc::new(dir)) + } +} + +//-------------------------------------------------------------------------------------------------- +// Tests +//-------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod private_node_tests { + use crate::{private::Rng, utils::TestRng}; + + use super::*; + + #[test] + fn serialized_private_node_can_be_deserialized() { + let rng = &mut TestRng(); + let original_file = PrivateNode::File(Rc::new(PrivateFile::new( + Namefilter::default(), + rng.random_bytes::<32>(), + rng.random_bytes::<32>(), + Utc::now(), + b"Lorem ipsum dolor sit amet".to_vec(), + ))); + + let (header_bytes, content_bytes) = original_file.serialize_as_cbor().unwrap(); + let deserialized_node = + PrivateNode::deserialize_from_cbor(&Some(header_bytes), &content_bytes).unwrap(); + + assert_eq!(original_file, deserialized_node); + } +} diff --git a/crates/fs/public/directory.rs b/crates/fs/public/directory.rs index 52b3843b..58847064 100644 --- a/crates/fs/public/directory.rs +++ b/crates/fs/public/directory.rs @@ -2,7 +2,10 @@ use std::{collections::BTreeMap, rc::Rc}; -use crate::{error, AsyncSerialize, BlockStore, FsError, Id, Metadata, UnixFsNodeKind}; +use crate::{ + error, utils, AsyncSerialize, BlockStore, FsError, Id, Metadata, PathNodes, PathNodesResult, + ReferenceableStore, UnixFsNodeKind, +}; use anyhow::{bail, ensure, Result}; use async_recursion::async_recursion; use async_stream::try_stream; @@ -18,6 +21,9 @@ use super::{PublicFile, PublicLink, PublicNode}; // Type Definitions //-------------------------------------------------------------------------------------------------- +pub type PublicPathNodes = PathNodes; +pub type PublicPathNodesResult = PathNodesResult; + /// A directory in a WNFS public file system. /// /// # Examples @@ -32,7 +38,7 @@ use super::{PublicFile, PublicLink, PublicNode}; /// ``` #[derive(Debug, Clone, PartialEq)] pub struct PublicDirectory { - pub(crate) metadata: Metadata, + pub metadata: Metadata, pub(crate) userland: BTreeMap, pub(crate) previous: Option, } @@ -46,175 +52,17 @@ struct PublicDirectorySerde { /// The result of an operation applied to a directory. #[derive(Debug, Clone, PartialEq)] -pub struct OpResult { +pub struct PublicOpResult { /// The root directory. pub root_dir: Rc, /// Implementation dependent but it usually the last leaf node operated on. pub result: T, } -/// Represents the directory nodes along a path. -/// -/// # Examples -/// -/// ``` -/// use wnfs::public::{PublicDirectory, PathNodes}; -/// use std::rc::Rc; -/// use chrono::Utc; -/// -/// let nodes = PathNodes::new( -/// Utc::now(), -/// &["movies".into(), "anime".into()], -/// Rc::new(PublicDirectory::new(Utc::now())), -/// ); -/// -/// println!("path nodes = {:?}", nodes); -/// ``` -#[derive(Debug, Clone, PartialEq)] -pub struct PathNodes { - pub path: Vec<(Rc, String)>, - pub tail: Rc, -} - -/// The kinds of outcome from getting a `PathNodes`. 
-/// -/// # Examples -/// -/// ``` -/// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; -/// use std::rc::Rc; -/// use chrono::Utc; -/// -/// #[async_std::main] -/// async fn main() { -/// let time = Utc::now(); -/// let dir = Rc::new(PublicDirectory::new(time)); -/// let store = MemoryBlockStore::default(); -/// -/// let OpResult { root_dir, result } = dir -/// .ls(&[], &store) -/// .await -/// .unwrap(); -/// -/// println!("ls = {:?}", result); -/// } -/// ``` -#[derive(Debug, Clone, PartialEq)] -pub enum PathNodesResult { - Complete(PathNodes), - MissingLink(PathNodes, String), - NotADirectory(PathNodes, String), -} - //-------------------------------------------------------------------------------------------------- // Implementations //-------------------------------------------------------------------------------------------------- -impl PathNodes { - /// Creates a new `PathNodes` that is not based on an existing file tree. - /// - /// # Examples - /// - /// ``` - /// use wnfs::public::{PublicDirectory, PathNodes}; - /// use std::rc::Rc; - /// use chrono::Utc; - /// - /// let nodes = PathNodes::new( - /// Utc::now(), - /// &["movies".into(), "anime".into()], - /// Rc::new(PublicDirectory::new(Utc::now())), - /// ); - /// - /// println!("path nodes = {:?}", nodes); - /// ``` - pub fn new(time: DateTime, path_segments: &[String], tail: Rc) -> Self { - let path: Vec<(Rc, String)> = path_segments - .iter() - .map(|segment| (Rc::new(PublicDirectory::new(time)), segment.clone())) - .collect(); - - Self { path, tail } - } - - /// Constructs a diverged path nodes by fixing up links in a `PathNodes` and returning the resulting root node. - /// - /// # Examples - /// - /// ``` - /// use wnfs::public::{PublicDirectory, PathNodes}; - /// use std::rc::Rc; - /// use chrono::Utc; - /// - /// let nodes = PathNodes::new( - /// Utc::now(), - /// &["movies".into(), "anime".into()], - /// Rc::new(PublicDirectory::new(Utc::now())), - /// ); - /// - /// let new_root = nodes.reconstruct(); - /// - /// println!("new_root = {:?}", new_root); - /// ``` - pub fn reconstruct(self) -> Rc { - if self.path.is_empty() { - return self.tail; - } - - let mut working_dir = self.tail; - for (dir, segment) in self.path.iter().rev() { - let mut dir = (**dir).clone(); - let link = PublicLink::with_dir(working_dir); - dir.userland.insert(segment.clone(), link); - working_dir = Rc::new(dir); - } - - working_dir - } - - /// Returns the length of the path nodes. - /// - /// # Examples - /// - /// ``` - /// use wnfs::public::{PublicDirectory, PathNodes}; - /// use std::rc::Rc; - /// use chrono::Utc; - /// - /// let nodes = PathNodes::new( - /// Utc::now(), - /// &["movies".into(), "anime".into()], - /// Rc::new(PublicDirectory::new(Utc::now())), - /// ); - /// - /// assert_eq!(nodes.len(), 2); - /// ``` - pub fn len(&self) -> usize { - self.path.len() - } - - /// Checks if the path nodes are empty. - /// - /// # Examples - /// - /// ``` - /// use wnfs::public::{PublicDirectory, PathNodes}; - /// use std::rc::Rc; - /// use chrono::Utc; - /// - /// let nodes = PathNodes::new( - /// Utc::now(), - /// &["movies".into(), "anime".into()], - /// Rc::new(PublicDirectory::new(Utc::now())), - /// ); - /// - /// assert!(!nodes.is_empty()); - /// ``` - pub fn is_empty(&self) -> bool { - self.path.is_empty() - } -} - impl PublicDirectory { /// Creates a new directory with provided time. /// @@ -237,6 +85,7 @@ impl PublicDirectory { } /// Gets the previous value of the directory. 
+ #[inline] pub fn get_previous(self: &Rc) -> Option { self.previous } @@ -246,14 +95,30 @@ impl PublicDirectory { &self.metadata } - /// Gets the directory nodes along specified path. + /// Creates a new `PublicPathNodes` that is not based on an existing file tree. + pub(crate) fn create_path_nodes( + path_segments: &[String], + time: DateTime, + ) -> PublicPathNodes { + let path: Vec<(Rc, String)> = path_segments + .iter() + .map(|segment| (Rc::new(PublicDirectory::new(time)), segment.clone())) + .collect(); + + PublicPathNodes { + path, + tail: Rc::new(PublicDirectory::new(time)), + } + } + + /// Uses specified path segments and their existence in the file tree to generate `PathNodes`. /// /// Supports cases where the entire path does not exist. pub(crate) async fn get_path_nodes( self: Rc, path_segments: &[String], store: &B, - ) -> Result { + ) -> Result { use PathNodesResult::*; let mut working_node = self; let mut path_nodes = Vec::with_capacity(path_segments.len()); @@ -283,29 +148,28 @@ impl PublicDirectory { } } - Ok(Complete(PathNodes { + Ok(Complete(PublicPathNodes { path: path_nodes, tail: Rc::clone(&working_node), })) } - /// Gets the directory nodes along a path and also supports creating missing intermediate directories. - pub(crate) async fn get_path_nodes_or_create( + /// Uses specified path segments to generate `PathNodes`. Creates missing directories as needed. + pub(crate) async fn get_or_create_path_nodes( self: Rc, path_segments: &[String], time: DateTime, store: &B, - ) -> Result { + ) -> Result { use PathNodesResult::*; match self.get_path_nodes(path_segments, store).await? { Complete(path_nodes) => Ok(path_nodes), NotADirectory(_, _) => error(FsError::InvalidPath), MissingLink(path_so_far, missing_link) => { let missing_path = path_segments.split_at(path_so_far.path.len() + 1).1; - let missing_path_nodes = - PathNodes::new(time, missing_path, Rc::new(PublicDirectory::new(time))); + let missing_path_nodes = Self::create_path_nodes(missing_path, time); - Ok(PathNodes { + Ok(PublicPathNodes { path: [ path_so_far.path, vec![(path_so_far.tail, missing_link)], @@ -318,12 +182,29 @@ impl PublicDirectory { } } + /// Fix up `PathNodes` so that parents refer to the newly updated children. + fn fix_up_path_nodes(path_nodes: PublicPathNodes) -> Rc { + if path_nodes.path.is_empty() { + return path_nodes.tail; + } + + let mut working_dir = path_nodes.tail; + for (dir, segment) in path_nodes.path.iter().rev() { + let mut dir = (**dir).clone(); + let link = PublicLink::with_dir(working_dir); + dir.userland.insert(segment.clone(), link); + working_dir = Rc::new(dir); + } + + working_dir + } + /// Follows a path and fetches the node at the end of the path. /// /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use std::rc::Rc; /// use chrono::Utc; /// @@ -333,12 +214,12 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. 
} = Rc::new(PublicDirectory::new(Utc::now())) /// .mkdir(&["pictures".into(), "cats".into()], Utc::now(), &store) /// .await /// .unwrap(); /// - /// let OpResult { root_dir, result } = root_dir + /// let PublicOpResult { root_dir, result } = root_dir /// .get_node(&["pictures".into()], &store) /// .await /// .unwrap(); @@ -350,14 +231,14 @@ impl PublicDirectory { self: Rc, path_segments: &[String], store: &B, - ) -> Result>> { + ) -> Result>> { use PathNodesResult::*; let root_dir = Rc::clone(&self); Ok(match path_segments.split_last() { Some((path_segment, parent_path)) => { match self.get_path_nodes(parent_path, store).await? { - Complete(parent_path_nodes) => OpResult { + Complete(parent_path_nodes) => PublicOpResult { root_dir, result: parent_path_nodes .tail @@ -368,7 +249,7 @@ impl PublicDirectory { NotADirectory(_, _) => bail!(FsError::NotFound), } } - None => OpResult { + None => PublicOpResult { root_dir, result: Some(PublicNode::Dir(self)), }, @@ -380,7 +261,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, Id, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, Id, MemoryBlockStore}; /// use std::rc::Rc; /// use chrono::Utc; /// @@ -388,7 +269,7 @@ impl PublicDirectory { /// async fn main() { /// let mut store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .mkdir(&["pictures".into(), "cats".into()], Utc::now(), &store) /// .await /// .unwrap(); @@ -438,7 +319,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -450,7 +331,7 @@ impl PublicDirectory { /// let mut store = MemoryBlockStore::default(); /// let cid = Cid::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// cid, @@ -460,7 +341,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir, result } = root_dir + /// let PublicOpResult { root_dir, result } = root_dir /// .read(&["pictures".into(), "cats".into(), "tabby.png".into()], &mut store) /// .await /// .unwrap(); @@ -472,14 +353,14 @@ impl PublicDirectory { self: Rc, path_segments: &[String], store: &mut B, - ) -> Result> { + ) -> Result> { let root_dir = Rc::clone(&self); let (path, filename) = utils::split_last(path_segments)?; match self.get_path_nodes(path, store).await? { PathNodesResult::Complete(node_path) => { match node_path.tail.lookup_node(filename, store).await? { - Some(PublicNode::File(file)) => Ok(OpResult { + Some(PublicNode::File(file)) => Ok(PublicOpResult { root_dir, result: file.userland, }), @@ -496,7 +377,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -507,7 +388,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. 
} = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -524,12 +405,12 @@ impl PublicDirectory { content_cid: Cid, time: DateTime, store: &B, - ) -> Result> { + ) -> Result> { let (directory_path, filename) = utils::split_last(path_segments)?; // This will create directories if they don't exist yet let mut directory_path_nodes = self - .get_path_nodes_or_create(directory_path, time, store) + .get_or_create_path_nodes(directory_path, time, store) .await?; let mut directory = (*directory_path_nodes.tail).clone(); @@ -552,9 +433,9 @@ impl PublicDirectory { .insert(filename.to_string(), PublicLink::with_file(Rc::new(file))); directory_path_nodes.tail = Rc::new(directory); - // reconstruct the file path - Ok(OpResult { - root_dir: directory_path_nodes.reconstruct(), + // Fix up the file path + Ok(PublicOpResult { + root_dir: Self::fix_up_path_nodes(directory_path_nodes), result: (), }) } @@ -564,7 +445,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, Id, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, Id, MemoryBlockStore}; /// use std::rc::Rc; /// use chrono::Utc; /// @@ -572,7 +453,7 @@ impl PublicDirectory { /// async fn main() { /// let mut store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .mkdir(&["pictures".into(), "cats".into()], Utc::now(), &store) /// .await /// .unwrap(); @@ -585,23 +466,23 @@ impl PublicDirectory { path_segments: &[String], time: DateTime, store: &B, - ) -> Result> { + ) -> Result> { let path_nodes = self - .get_path_nodes_or_create(path_segments, time, store) + .get_or_create_path_nodes(path_segments, time, store) .await?; - Ok(OpResult { - root_dir: path_nodes.reconstruct(), + Ok(PublicOpResult { + root_dir: Self::fix_up_path_nodes(path_nodes), result: (), }) } - /// Returns the name and metadata of the direct children of a directory. + /// Returns names and metadata of directory's immediate children. /// /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -612,7 +493,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -622,7 +503,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir, result } = root_dir + /// let PublicOpResult { root_dir, result } = root_dir /// .ls(&["pictures".into(), "cats".into()], &store) /// .await /// .unwrap(); @@ -635,7 +516,7 @@ impl PublicDirectory { self: Rc, path_segments: &[String], store: &B, - ) -> Result>> { + ) -> Result>> { let root_dir = Rc::clone(&self); match self.get_path_nodes(path_segments, store).await? 
{ PathNodesResult::Complete(path_nodes) => { @@ -650,7 +531,7 @@ impl PublicDirectory { } } } - Ok(OpResult { root_dir, result }) + Ok(PublicOpResult { root_dir, result }) } _ => bail!(FsError::NotFound), } @@ -661,7 +542,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -672,7 +553,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -682,12 +563,12 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir, .. } = root_dir + /// let PublicOpResult { root_dir, .. } = root_dir /// .rm(&["pictures".into(), "cats".into()], &store) /// .await /// .unwrap(); /// - /// let OpResult { root_dir, result } = root_dir + /// let PublicOpResult { root_dir, result } = root_dir /// .ls(&["pictures".into()], &store) /// .await /// .unwrap(); @@ -699,17 +580,17 @@ impl PublicDirectory { self: Rc, path_segments: &[String], store: &B, - ) -> Result> { + ) -> Result> { let (directory_path, node_name) = utils::split_last(path_segments)?; let mut directory_node_path = match self.get_path_nodes(directory_path, store).await? { - PathNodesResult::Complete(node_path) => node_path, + PublicPathNodesResult::Complete(node_path) => node_path, _ => bail!(FsError::NotFound), }; let mut directory = (*directory_node_path.tail).clone(); - // remove the entry from its parent directory + // Remove the entry from its parent directory let removed_node = match directory.userland.remove(node_name) { Some(link) => link.get_owned_value(store).await?, None => bail!(FsError::NotFound), @@ -717,8 +598,8 @@ impl PublicDirectory { directory_node_path.tail = Rc::new(directory); - Ok(OpResult { - root_dir: directory_node_path.reconstruct(), + Ok(PublicOpResult { + root_dir: Self::fix_up_path_nodes(directory_node_path), result: removed_node, }) } @@ -730,7 +611,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -741,7 +622,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -751,7 +632,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir, .. } = root_dir + /// let PublicOpResult { root_dir, .. 
} = root_dir /// .basic_mv( /// &["pictures".into(), "cats".into()], /// &["cats".into()], @@ -761,7 +642,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir, result } = root_dir + /// let PublicOpResult { root_dir, result } = root_dir /// .ls(&[], &store) /// .await /// .unwrap(); @@ -775,17 +656,17 @@ impl PublicDirectory { path_segments_to: &[String], time: DateTime, store: &B, - ) -> Result> { + ) -> Result> { let root_dir = Rc::clone(&self); let (directory_path_nodes, filename) = utils::split_last(path_segments_to)?; - let OpResult { + let PublicOpResult { root_dir, result: removed_node, } = root_dir.rm(path_segments_from, store).await?; let mut path_nodes = match root_dir.get_path_nodes(directory_path_nodes, store).await? { - PathNodesResult::Complete(node_path) => node_path, + PublicPathNodesResult::Complete(node_path) => node_path, _ => bail!(FsError::NotFound), }; @@ -804,8 +685,8 @@ impl PublicDirectory { path_nodes.tail = Rc::new(directory); - Ok(OpResult { - root_dir: path_nodes.reconstruct(), + Ok(PublicOpResult { + root_dir: Self::fix_up_path_nodes(path_nodes), result: (), }) } @@ -815,7 +696,7 @@ impl PublicDirectory { /// # Examples /// /// ``` - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use std::rc::Rc; /// use chrono::Utc; @@ -826,7 +707,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let mut store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir: base_root, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir: base_root, .. } = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -836,7 +717,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir: recent_root, .. } = Rc::clone(&base_root) + /// let PublicOpResult { root_dir: recent_root, .. } = Rc::clone(&base_root) /// .write( /// &["pictures".into(), "cats".into(), "katherine.png".into()], /// Cid::default(), @@ -846,7 +727,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir: derived_root, .. } = recent_root + /// let PublicOpResult { root_dir: derived_root, .. } = recent_root /// .base_history_on(base_root, &mut store) /// .await /// .unwrap(); @@ -856,9 +737,9 @@ impl PublicDirectory { self: Rc, base: Rc, store: &mut B, - ) -> Result> { + ) -> Result> { if Rc::ptr_eq(&self, &base) { - return Ok(OpResult { + return Ok(PublicOpResult { root_dir: Rc::clone(&self), result: (), }); @@ -877,7 +758,7 @@ impl PublicDirectory { } } - Ok(OpResult { + Ok(PublicOpResult { root_dir: Rc::new(dir), result: (), }) @@ -935,7 +816,7 @@ impl PublicDirectory { /// ``` /// use std::{rc::Rc, pin::Pin}; /// - /// use wnfs::{public::{PublicDirectory, OpResult}, MemoryBlockStore}; + /// use wnfs::{public::{PublicDirectory, PublicOpResult}, MemoryBlockStore}; /// use libipld::cid::Cid; /// use chrono::Utc; /// use futures_util::pin_mut; @@ -947,7 +828,7 @@ impl PublicDirectory { /// let dir = Rc::new(PublicDirectory::new(time)); /// let mut store = MemoryBlockStore::default(); /// - /// let OpResult { root_dir: base_root, .. } = Rc::new(PublicDirectory::new(Utc::now())) + /// let PublicOpResult { root_dir: base_root, .. 
} = Rc::new(PublicDirectory::new(Utc::now())) /// .write( /// &["pictures".into(), "cats".into(), "tabby.png".into()], /// Cid::default(), @@ -957,7 +838,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir: recent_root, .. } = Rc::clone(&base_root) + /// let PublicOpResult { root_dir: recent_root, .. } = Rc::clone(&base_root) /// .write( /// &["pictures".into(), "cats".into(), "katherine.png".into()], /// Cid::default(), @@ -967,7 +848,7 @@ impl PublicDirectory { /// .await /// .unwrap(); /// - /// let OpResult { root_dir: derived_root, .. } = recent_root + /// let PublicOpResult { root_dir: derived_root, .. } = recent_root /// .base_history_on(base_root, &mut store) /// .await /// .unwrap(); @@ -1004,11 +885,13 @@ impl Id for PublicDirectory { /// Implements async deserialization for serde serializable types. #[async_trait(?Send)] impl AsyncSerialize for PublicDirectory { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { + type StoreRef = Cid; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { let encoded_userland = { let mut map = BTreeMap::new(); for (name, link) in self.userland.iter() { @@ -1056,23 +939,6 @@ impl<'de> Deserialize<'de> for PublicDirectory { } } -//-------------------------------------------------------------------------------------------------- -// Utilities -//-------------------------------------------------------------------------------------------------- - -mod utils { - use anyhow::Result; - - use crate::{error, FsError}; - - pub(super) fn split_last(path_segments: &[String]) -> Result<(&[String], &String)> { - match path_segments.split_last() { - Some((last, rest)) => Ok((rest, last)), - None => error(FsError::InvalidPath), - } - } -} - //-------------------------------------------------------------------------------------------------- // Tests //-------------------------------------------------------------------------------------------------- @@ -1090,7 +956,7 @@ mod public_directory_tests { let content_cid = Cid::default(); let time = Utc::now(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write(&["text.txt".into()], content_cid, time, &store) .await .unwrap(); @@ -1149,12 +1015,12 @@ mod public_directory_tests { let time = Utc::now(); let store = MemoryBlockStore::default(); - let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(time)) + let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(time)) .mkdir(&["tamedun".into(), "pictures".into()], time, &store) .await .unwrap(); - let OpResult { result, .. } = root_dir + let PublicOpResult { result, .. } = root_dir .get_node(&["tamedun".into(), "pictures".into()], &store) .await .unwrap(); @@ -1168,12 +1034,12 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir(&["tamedun".into(), "pictures".into()], time, &store) .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write( &["tamedun".into(), "pictures".into(), "puppy.jpg".into()], Cid::default(), @@ -1183,7 +1049,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. 
} = root_dir .mkdir( &["tamedun".into(), "pictures".into(), "cats".into()], time, @@ -1192,7 +1058,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { result, .. } = root_dir + let PublicOpResult { result, .. } = root_dir .ls(&["tamedun".into(), "pictures".into()], &store) .await .unwrap(); @@ -1214,12 +1080,12 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir(&["tamedun".into(), "pictures".into()], time, &store) .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write( &["tamedun".into(), "pictures".into(), "puppy.jpg".into()], Cid::default(), @@ -1229,7 +1095,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir( &["tamedun".into(), "pictures".into(), "cats".into()], time, @@ -1259,12 +1125,12 @@ mod public_directory_tests { let content_cid = Cid::default(); let time = Utc::now(); - let OpResult { root_dir, .. } = Rc::new(PublicDirectory::new(time)) + let PublicOpResult { root_dir, .. } = Rc::new(PublicDirectory::new(time)) .write(&["text.txt".into()], content_cid, time, &store) .await .unwrap(); - let OpResult { result, .. } = root_dir + let PublicOpResult { result, .. } = root_dir .read(&["text.txt".into()], &mut store) .await .unwrap(); @@ -1277,15 +1143,11 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let now = Utc::now(); - let path_nodes = PathNodes::new( - now, - &["Documents".into(), "Apps".into()], - Rc::new(PublicDirectory::new(now)), - ); - - let reconstructed = path_nodes.clone().reconstruct(); + let path_nodes = + PublicDirectory::create_path_nodes(&["Documents".into(), "Apps".into()], now); - let result = reconstructed + let fixed = PublicDirectory::fix_up_path_nodes(path_nodes.clone()); + let result = fixed .get_path_nodes(&["Documents".into(), "Apps".into()], &store) .await .unwrap(); @@ -1307,7 +1169,7 @@ mod public_directory_tests { let mut store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { + let PublicOpResult { root_dir: base_root, .. } = root_dir @@ -1320,7 +1182,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { + let PublicOpResult { root_dir: updated_root, .. } = Rc::clone(&base_root) @@ -1333,7 +1195,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { + let PublicOpResult { root_dir: derived_root, .. } = updated_root @@ -1349,7 +1211,7 @@ mod public_directory_tests { assert_eq!(derived_previous_cid.unwrap(), base_cid); // Assert that some node that exists between versions points to its old version. - let OpResult { + let PublicOpResult { result: derived_node, .. } = Rc::clone(&derived_root) @@ -1357,7 +1219,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { + let PublicOpResult { result: base_node, .. } = base_root .get_node(&["pictures".into(), "cats".into()], &store) @@ -1374,7 +1236,7 @@ mod public_directory_tests { assert_eq!(derived_previous_cid.unwrap(), base_cid); // Assert that some node that doesn't exists between versions does not point to anything. - let OpResult { + let PublicOpResult { result: derived_node, .. 
} = Rc::clone(&derived_root) @@ -1395,7 +1257,7 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write( &["pictures".into(), "cats".into(), "tabby.jpg".into()], Cid::default(), @@ -1405,7 +1267,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write( &["pictures".into(), "cats".into(), "luna.png".into()], Cid::default(), @@ -1415,12 +1277,12 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir(&["images".into()], time, &store) .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .basic_mv( &["pictures".into(), "cats".into()], &["images".into(), "cats".into()], @@ -1430,12 +1292,14 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { root_dir, result } = root_dir.ls(&["images".into()], &store).await.unwrap(); + let PublicOpResult { root_dir, result } = + root_dir.ls(&["images".into()], &store).await.unwrap(); assert_eq!(result.len(), 1); assert_eq!(result[0].0, String::from("cats")); - let OpResult { result, .. } = root_dir.ls(&["pictures".into()], &store).await.unwrap(); + let PublicOpResult { result, .. } = + root_dir.ls(&["pictures".into()], &store).await.unwrap(); assert_eq!(result.len(), 0); } @@ -1446,7 +1310,7 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir( &[ "videos".into(), @@ -1478,12 +1342,12 @@ mod public_directory_tests { let mut store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write(&["file.txt".into()], Cid::default(), time, &store) .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .basic_mv( &["file.txt".into()], &["renamed.txt".into()], @@ -1493,7 +1357,7 @@ mod public_directory_tests { .await .unwrap(); - let OpResult { result, .. } = root_dir + let PublicOpResult { result, .. } = root_dir .read(&["renamed.txt".into()], &mut store) .await .unwrap(); @@ -1507,12 +1371,12 @@ mod public_directory_tests { let store = MemoryBlockStore::default(); let root_dir = Rc::new(PublicDirectory::new(time)); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .mkdir(&["movies".into(), "ghibli".into()], time, &store) .await .unwrap(); - let OpResult { root_dir, .. } = root_dir + let PublicOpResult { root_dir, .. } = root_dir .write(&["file.txt".into()], Cid::default(), time, &store) .await .unwrap(); diff --git a/crates/fs/public/link.rs b/crates/fs/public/link.rs index a9ed9ba7..416b0658 100644 --- a/crates/fs/public/link.rs +++ b/crates/fs/public/link.rs @@ -25,11 +25,13 @@ impl PublicLink { } /// Creates a new directory node link. + #[inline] pub fn with_dir(dir: Rc) -> Self { Link::from(PublicNode::Dir(dir)) } /// Creates a new file node link. 
+ #[inline] pub fn with_file(file: Rc) -> Self { Link::from(PublicNode::File(file)) } diff --git a/crates/fs/public/node.rs b/crates/fs/public/node.rs index eee0526d..79c3fac0 100644 --- a/crates/fs/public/node.rs +++ b/crates/fs/public/node.rs @@ -9,7 +9,9 @@ use libipld::{Cid, Ipld}; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use super::{PublicDirectory, PublicFile}; -use crate::{common::BlockStore, AsyncSerialize, FsError, Id, Metadata, UnixFsNodeKind}; +use crate::{ + common::BlockStore, AsyncSerialize, FsError, Id, Metadata, ReferenceableStore, UnixFsNodeKind, +}; //-------------------------------------------------------------------------------------------------- // Type Definitions @@ -194,11 +196,13 @@ impl From for PublicNode { /// Implements async deserialization for serde serializable types. #[async_trait(?Send)] impl AsyncSerialize for PublicNode { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { + type StoreRef = Cid; + + async fn async_serialize(&self, serializer: S, store: &mut RS) -> Result + where + S: Serializer, + RS: ReferenceableStore + ?Sized, + { match self { Self::File(file) => file.serialize(serializer), Self::Dir(dir) => dir.async_serialize(serializer, store).await, diff --git a/crates/fs/traits.rs b/crates/fs/traits.rs index 507beb3c..3d6b4039 100644 --- a/crates/fs/traits.rs +++ b/crates/fs/traits.rs @@ -1,33 +1,7 @@ -use std::rc::Rc; - use crate::BlockStore; use anyhow::Result; use async_trait::async_trait; -use libipld::{error::SerdeError, serde as ipld_serde, Ipld}; -use serde::Serialize; -use serde::Serializer; - -//-------------------------------------------------------------------------------------------------- -// Macros -//-------------------------------------------------------------------------------------------------- - -macro_rules! impl_async_serialize { - ( $( $ty:ty $( : < $( $generics:ident ),+ > )? ),+ ) => { - $( - #[async_trait(?Send)] - impl $( < $( $generics ),+ > )? AsyncSerialize for $ty $( where $( $generics: Serialize ),+ )? { - async fn async_serialize( - &self, - serializer: S, - _: &mut BS, - ) -> Result { - self.serialize(serializer) - } - } - )+ - }; -} //-------------------------------------------------------------------------------------------------- // Traits @@ -45,61 +19,3 @@ pub trait IpldEq { /// Checks if the two items are deeply equal. async fn eq(&self, other: &Self, store: &mut B) -> Result; } - -/// A **data structure** that can be serialized into any data format supported -/// by Serde. -/// -/// This trait is slightly different from Serde's Serialize trait because it allows for asynchronous -/// serialisation and it is designed for the IPLD ecosystem where a `BlockStore` is sometimes needed to -/// properly resolve the internal state of certain data structures to Cids. -/// -/// An example of this is the PublicDirectory which can contain links to other IPLD nodes. -/// These links need to be resolved to Cids during serialization if they aren't already. -#[async_trait(?Send)] -pub trait AsyncSerialize { - /// Serializes the type. - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result; - - /// Serialize with an IPLD serializer. 
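The new `async_serialize` signature adopted in the `PublicDirectory` and `PublicNode` impls above swaps the concrete `BlockStore` parameter for an associated `StoreRef` type plus a `ReferenceableStore` bound. A compilable, synchronous sketch of that shape (the real traits are async and serde-based; every name below is illustrative):

```rust
// Simplified, synchronous mirror of the associated-reference-type pattern.
trait ReferenceableStore {
    type Ref;
    fn put(&mut self, bytes: Vec<u8>) -> Self::Ref;
}

trait SerializeWithStore {
    type StoreRef;
    fn serialize_with<RS>(&self, store: &mut RS) -> Self::StoreRef
    where
        RS: ReferenceableStore<Ref = Self::StoreRef> + ?Sized;
}

struct MemStore(Vec<Vec<u8>>);

impl ReferenceableStore for MemStore {
    type Ref = usize; // stand-in for a Cid

    fn put(&mut self, bytes: Vec<u8>) -> usize {
        self.0.push(bytes);
        self.0.len() - 1
    }
}

struct Blob(Vec<u8>);

impl SerializeWithStore for Blob {
    type StoreRef = usize;

    fn serialize_with<RS>(&self, store: &mut RS) -> usize
    where
        RS: ReferenceableStore<Ref = Self::StoreRef> + ?Sized,
    {
        store.put(self.0.clone())
    }
}

fn main() {
    let mut store = MemStore(Vec::new());
    let first = Blob(vec![1, 2, 3]).serialize_with(&mut store);
    assert_eq!(first, 0);
}
```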
- async fn async_serialize_ipld( - &self, - store: &mut B, - ) -> Result { - self.async_serialize(ipld_serde::Serializer, store).await - } -} - -//-------------------------------------------------------------------------------------------------- -// Implementations -//-------------------------------------------------------------------------------------------------- - -#[async_trait(?Send)] -impl AsyncSerialize for Rc { - async fn async_serialize( - &self, - serializer: S, - store: &mut B, - ) -> Result { - self.as_ref().async_serialize(serializer, store).await - } -} - -impl_async_serialize! { usize, u128, u64, u32, u16, u8, isize, i128, i64, i32, i16, i8 } -impl_async_serialize! { String, &str } -impl_async_serialize! { - (A,): , - (A, B): , - (A, B, C): , - (A, B, C, D): , - (A, B, C, D, E): , - (A, B, C, D, E, F): , - (A, B, C, D, E, F, G): , - (A, B, C, D, E, F, G, H): , - (A, B, C, D, E, F, G, H, I): , - (A, B, C, D, E, F, G, H, I, J): , - (A, B, C, D, E, F, G, H, I, J, K): -} diff --git a/crates/proptest-regressions/private/hamt/node.txt b/crates/proptest-regressions/private/hamt/node.txt new file mode 100644 index 00000000..c1f521f0 --- /dev/null +++ b/crates/proptest-regressions/private/hamt/node.txt @@ -0,0 +1,8 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 1b041fdc46a495f11fb7eb853eaf58b5fdfdc31c81568f0ea15c5840fe3398a8 # shrinks to input = _TestInsertsRemovesArgs { operations: [Insert("1"), Remove("1")] } +cc 4a1bfa405c3bf3cc84a6c0e48bd6e96cce48d99c5c28341a842bc2a5e6d99c30 # shrinks to input = _NodeOperationsAreHistoryIndependentArgs { pair: (Operations([Insert("key 17", 508), Insert("key 81", 971), Insert("key 997", 365), Remove("key 17"), Insert("key 68", 870), Insert("key 304", 331)]), Operations([Insert("key 81", 971), Insert("key 17", 508), Insert("key 997", 365), Insert("key 304", 331), Insert("key 68", 870), Remove("key 17")])) } diff --git a/crates/wasm/Cargo.toml b/crates/wasm/Cargo.toml index b8b82984..99e657d0 100644 --- a/crates/wasm/Cargo.toml +++ b/crates/wasm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "wasm-wnfs" -version = "0.1.7" +version = "0.1.8" description = "WebNative Filesystem API (WebAssembly)" keywords = ["wnfs", "webnative", "ipfs", "decentralisation"] categories = [ @@ -17,7 +17,7 @@ homepage = "https://fission.codes" authors = ["The Fission Authors"] [dependencies] -wnfs = { path = "../fs", version = "0.1.0" } +wnfs = { path = "../fs", version = "0.1.8" } wasm-bindgen = { version = "0.2", optional = true, features = ["serde-serialize"] } wasm-bindgen-futures = { version = "0.4", optional = true } js-sys = { version = "0.3", optional = true } diff --git a/crates/wasm/fs/mod.rs b/crates/wasm/fs/mod.rs index 5596332d..6199a17e 100644 --- a/crates/wasm/fs/mod.rs +++ b/crates/wasm/fs/mod.rs @@ -1,9 +1,12 @@ mod blockstore; mod metadata; +mod private; mod public; mod types; +mod utils; pub use blockstore::*; +pub use private::*; pub use public::*; pub type JsResult = Result; diff --git a/crates/wasm/fs/private/directory.rs b/crates/wasm/fs/private/directory.rs new file mode 100644 index 00000000..0940d966 --- /dev/null +++ b/crates/wasm/fs/private/directory.rs @@ -0,0 +1,270 @@ +//! The bindgen API for PrivateDirectory. 
+ +use std::rc::Rc; + +use chrono::{DateTime, Utc}; +use js_sys::{Array, Date, Promise, Uint8Array}; +use wasm_bindgen::prelude::wasm_bindgen; +use wasm_bindgen::JsValue; +use wasm_bindgen_futures::future_to_promise; +use wnfs::{ + private::{ + INumber, PrivateDirectory as WnfsPrivateDirectory, PrivateOpResult as WnfsPrivateOpResult, + }, + HashOutput, Id, +}; + +use crate::{ + fs::{ + utils::{self, error}, + BlockStore, ForeignBlockStore, JsResult, Namefilter, PrivateForest, PrivateNode, Rng, + }, + value, +}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// A directory in a WNFS public file system. +#[wasm_bindgen] +pub struct PrivateDirectory(pub(crate) Rc); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +#[wasm_bindgen] +impl PrivateDirectory { + /// Creates a new private directory. + #[wasm_bindgen(constructor)] + pub fn new( + parent_bare_name: Namefilter, + inumber: Vec, // [u8; 32] + ratchet_seed: Vec, // [u8; 32] + time: &Date, + ) -> JsResult { + let inumber: INumber = inumber + .try_into() + .map_err(error("Cannot convert inumber"))?; + + let ratchet_seed: HashOutput = ratchet_seed + .try_into() + .map_err(error("Cannot convert ratchet seed"))?; + + let time = DateTime::::from(time); + + Ok(Self(Rc::new(WnfsPrivateDirectory::new( + parent_bare_name.0, + inumber, + ratchet_seed, + time, + )))) + } + + /// Follows a path and fetches the node at the end of the path. + #[wasm_bindgen(js_name = "getNode")] + pub fn get_node( + &self, + path_segments: &Array, + hamt: PrivateForest, + store: BlockStore, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let store = ForeignBlockStore(store); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { + root_dir, + hamt, + result, + } = directory + .get_node(&path_segments, hamt.0, &store) + .await + .map_err(error("Cannot get node"))?; + + Ok(utils::create_private_op_result( + root_dir, + hamt, + result.map(PrivateNode), + )?) + })) + } + + /// Looks up a node by its path name in the current directory. + #[wasm_bindgen(js_name = "lookupNode")] + pub fn lookup_node( + &self, + path_segment: &str, + hamt: PrivateForest, + store: BlockStore, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let store = ForeignBlockStore(store); + let path_segment = path_segment.to_string(); + + Ok(future_to_promise(async move { + let found_node = directory + .lookup_node(&path_segment, &hamt.0, &store) + .await + .map_err(error("Cannot lookup node"))?; + + Ok(value!(found_node.map(PrivateNode))) + })) + } + + /// Reads specified file content from the directory. 
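The constructor and both lookups above use the binding recipe repeated by every method in this file: clone the `Rc`-backed state, move it into an async block, and pass that block to `future_to_promise` so JavaScript receives a Promise. A stripped-down sketch of just that recipe, assuming the wasm-bindgen and wasm-bindgen-futures crates and a hypothetical `Counter` type:

```rust
// Minimal future_to_promise binding pattern; `Counter` is hypothetical.
use std::rc::Rc;

use js_sys::Promise;
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::future_to_promise;

#[wasm_bindgen]
pub struct Counter(Rc<u32>);

#[wasm_bindgen]
impl Counter {
    #[wasm_bindgen(constructor)]
    pub fn new(start: u32) -> Counter {
        Counter(Rc::new(start))
    }

    /// Returns a Promise that resolves to the incremented value.
    pub fn increment(&self) -> Promise {
        // Clone the cheap Rc handle so the async block owns its own reference.
        let state = Rc::clone(&self.0);
        future_to_promise(async move {
            // A real binding would `.await` a wnfs operation here and map its
            // error into a JsValue, as the methods above do with `error(..)`.
            Ok(JsValue::from(*state + 1))
        })
    }
}
```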
+ pub fn read( + &self, + path_segments: &Array, + hamt: PrivateForest, + store: BlockStore, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let store = ForeignBlockStore(store); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { + root_dir, + hamt, + result, + } = directory + .read(&path_segments, hamt.0, &store) + .await + .map_err(error("Cannot read from directory"))?; + + Ok(utils::create_private_op_result( + root_dir, + hamt, + Uint8Array::from(&result[..]), + )?) + })) + } + + /// Returns names and metadata of the direct children of a directory. + pub fn ls( + &self, + path_segments: &Array, + hamt: PrivateForest, + store: BlockStore, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let store = ForeignBlockStore(store); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { + root_dir, + hamt, + result, + } = directory + .ls(&path_segments, hamt.0, &store) + .await + .map_err(error("Cannot list directory children"))?; + + let result = result + .iter() + .flat_map(|(name, metadata)| utils::create_ls_entry(name, metadata)) + .collect::(); + + Ok(utils::create_private_op_result(root_dir, hamt, result)?) + })) + } + + /// Removes a file or directory from the directory. + pub fn rm( + &self, + path_segments: &Array, + hamt: PrivateForest, + store: BlockStore, + mut rng: Rng, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let mut store = ForeignBlockStore(store); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { + root_dir, + hamt, + result: node, + } = directory + .rm(&path_segments, hamt.0, &mut store, &mut rng) + .await + .map_err(error("Cannot remove from directory"))?; + + Ok(utils::create_private_op_result( + root_dir, + hamt, + PrivateNode(node), + )?) + })) + } + + /// Writes a file to the directory. + pub fn write( + &self, + path_segments: &Array, + content: Vec, + time: &Date, + hamt: PrivateForest, + store: BlockStore, + mut rng: Rng, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let mut store = ForeignBlockStore(store); + let time = DateTime::::from(time); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { root_dir, hamt, .. } = directory + .write(&path_segments, time, content, hamt.0, &mut store, &mut rng) + .await + .map_err(error("Cannot write to directory"))?; + + Ok(utils::create_private_op_result( + root_dir, + hamt, + JsValue::NULL, + )?) + })) + } + + /// Creates a new directory at the specified path. + /// + /// This method acts like `mkdir -p` in Unix because it creates intermediate directories if they do not exist. + pub fn mkdir( + &self, + path_segments: &Array, + time: &Date, + hamt: PrivateForest, + store: BlockStore, + mut rng: Rng, + ) -> JsResult { + let directory = Rc::clone(&self.0); + let mut store = ForeignBlockStore(store); + let time = DateTime::::from(time); + let path_segments = utils::convert_path_segments(path_segments)?; + + Ok(future_to_promise(async move { + let WnfsPrivateOpResult { root_dir, hamt, .. } = directory + .mkdir(&path_segments, time, hamt.0, &mut store, &mut rng) + .await + .map_err(error("Cannot create directory: {e}"))?; + + Ok(utils::create_private_op_result( + root_dir, + hamt, + JsValue::NULL, + )?) + })) + } + + /// Gets a unique id for node. 
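Note how every operation above threads the `PrivateForest` (`hamt`) through and resolves to an object carrying both the new root directory and the updated forest; callers are expected to rebind both on each call. A toy illustration of that calling convention with hypothetical `Dir` and `Forest` types, not the wnfs ones:

```rust
// Each operation returns a new root plus the updated forest; the caller
// rebinds both before the next call.
use std::rc::Rc;

#[derive(Clone, Default)]
struct Forest(Vec<String>);

#[derive(Clone, Default)]
struct Dir(Vec<String>);

struct OpResult<T> {
    root_dir: Rc<Dir>,
    forest: Rc<Forest>,
    result: T,
}

fn write(dir: Rc<Dir>, forest: Rc<Forest>, path: &str) -> OpResult<()> {
    // Clone-on-write: the previous versions stay untouched behind their Rcs.
    let mut new_dir = (*dir).clone();
    new_dir.0.push(path.to_string());

    let mut new_forest = (*forest).clone();
    new_forest.0.push(format!("ciphertext block for {path}"));

    OpResult {
        root_dir: Rc::new(new_dir),
        forest: Rc::new(new_forest),
        result: (),
    }
}

fn main() {
    let (dir, forest) = (Rc::new(Dir::default()), Rc::new(Forest::default()));

    let OpResult { root_dir, forest, .. } = write(dir, forest, "pictures/cats/tabby.png");
    let OpResult { root_dir, forest, .. } = write(root_dir, forest, "pictures/dogs/billie.jpeg");

    assert_eq!(root_dir.0.len(), 2);
    assert_eq!(forest.0.len(), 2);
}
```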
+ #[wasm_bindgen(js_name = "getId")] + pub fn get_id(&self) -> String { + self.0.get_id() + } +} diff --git a/crates/wasm/fs/private/file.rs b/crates/wasm/fs/private/file.rs new file mode 100644 index 00000000..36af3cac --- /dev/null +++ b/crates/wasm/fs/private/file.rs @@ -0,0 +1,60 @@ +//! The bindgen API for PrivateFile. + +use chrono::{DateTime, Utc}; +use js_sys::Date; +use wasm_bindgen::prelude::wasm_bindgen; +use wnfs::{ + private::{INumber, PrivateFile as WnfsPrivateFile}, + HashOutput, Id, +}; + +use crate::fs::{utils::error, JsResult, Namefilter}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// A file in a WNFS public file system. +#[wasm_bindgen] +pub struct PrivateFile(WnfsPrivateFile); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +#[wasm_bindgen] +impl PrivateFile { + /// Creates a new private file. + #[wasm_bindgen(constructor)] + pub fn new( + parent_bare_name: Namefilter, + inumber: Vec, // [u8; 32] + ratchet_seed: Vec, // [u8; 32] + time: &Date, + content: Vec, + ) -> JsResult { + let inumber: INumber = inumber + .try_into() + .map_err(error("Cannot convert inumber"))?; + + let ratchet_seed: HashOutput = ratchet_seed + .try_into() + .map_err(error("Cannot convert ratchet seed"))?; + + let time = DateTime::::from(time); + + Ok(PrivateFile(WnfsPrivateFile::new( + parent_bare_name.0, + inumber, + ratchet_seed, + time, + content, + ))) + } + + /// Gets a unique id for node. + #[wasm_bindgen(js_name = "getId")] + pub fn get_id(&self) -> String { + self.0.get_id() + } +} diff --git a/crates/wasm/fs/private/forest.rs b/crates/wasm/fs/private/forest.rs new file mode 100644 index 00000000..3e1df372 --- /dev/null +++ b/crates/wasm/fs/private/forest.rs @@ -0,0 +1,26 @@ +use std::rc::Rc; + +use wasm_bindgen::prelude::wasm_bindgen; +use wnfs::private::PrivateForest as WnfsPrivateForest; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + +/// A HAMT forest in a WNFS public file system. +#[wasm_bindgen] +pub struct PrivateForest(pub(crate) Rc); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +#[wasm_bindgen] +impl PrivateForest { + /// Creates a new HAMT forest. 
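Both private constructors above turn JavaScript byte arrays into fixed 32-byte values (the inumber and the ratchet seed) with `try_into`, mapping a length mismatch to a readable error. The conversion in isolation, with a plain `String` error standing in for `js_sys::Error`:

```rust
// Vec<u8> -> [u8; 32] conversion with a descriptive error on length mismatch.
fn to_seed(bytes: Vec<u8>) -> Result<[u8; 32], String> {
    bytes
        .try_into()
        .map_err(|v: Vec<u8>| format!("Cannot convert ratchet seed: got {} bytes, expected 32", v.len()))
}

fn main() {
    assert!(to_seed(vec![0u8; 32]).is_ok());
    assert!(to_seed(vec![0u8; 16]).is_err());
}
```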
+    #[wasm_bindgen(constructor)]
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> PrivateForest {
+        Self(Rc::new(WnfsPrivateForest::default()))
+    }
+}
diff --git a/crates/wasm/fs/private/mod.rs b/crates/wasm/fs/private/mod.rs
new file mode 100644
index 00000000..b4d9cd19
--- /dev/null
+++ b/crates/wasm/fs/private/mod.rs
@@ -0,0 +1,13 @@
+mod directory;
+mod file;
+mod forest;
+mod namefilter;
+mod node;
+mod rng;
+
+pub use directory::*;
+pub use file::*;
+pub use forest::*;
+pub use namefilter::*;
+pub use node::*;
+pub use rng::*;
diff --git a/crates/wasm/fs/private/namefilter.rs b/crates/wasm/fs/private/namefilter.rs
new file mode 100644
index 00000000..68f93c45
--- /dev/null
+++ b/crates/wasm/fs/private/namefilter.rs
@@ -0,0 +1,23 @@
+use wasm_bindgen::prelude::wasm_bindgen;
+use wnfs::private::namefilter::Namefilter as WnfsNamefilter;
+
+//--------------------------------------------------------------------------------------------------
+// Type Definitions
+//--------------------------------------------------------------------------------------------------
+
+#[wasm_bindgen]
+pub struct Namefilter(pub(crate) WnfsNamefilter);
+
+//--------------------------------------------------------------------------------------------------
+// Implementations
+//--------------------------------------------------------------------------------------------------
+
+#[wasm_bindgen]
+impl Namefilter {
+    /// Creates a new namefilter.
+    #[wasm_bindgen(constructor)]
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> Namefilter {
+        Self(WnfsNamefilter::default())
+    }
+}
diff --git a/crates/wasm/fs/private/node.rs b/crates/wasm/fs/private/node.rs
new file mode 100644
index 00000000..a985413f
--- /dev/null
+++ b/crates/wasm/fs/private/node.rs
@@ -0,0 +1,40 @@
+use js_sys::Error;
+use wasm_bindgen::prelude::wasm_bindgen;
+use wnfs::{private::PrivateNode as WnfsPrivateNode, Id};
+
+use crate::fs::{JsResult, PrivateDirectory};
+
+//--------------------------------------------------------------------------------------------------
+// Type Definitions
+//--------------------------------------------------------------------------------------------------
+
+/// Wraps a wnfs PrivateNode.
+#[wasm_bindgen] +pub struct PrivateNode(pub(crate) WnfsPrivateNode); + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +#[wasm_bindgen] +impl PrivateNode { + #[wasm_bindgen(js_name = "asDir")] + pub fn as_dir(&self) -> JsResult { + let dir = self + .0 + .as_dir() + .map_err(|e| Error::new(&format!("Cannot cast to a directory: {e}")))?; + + Ok(PrivateDirectory(dir)) + } + + #[wasm_bindgen(js_name = "isDir")] + pub fn is_dir(&self) -> bool { + self.0.is_dir() + } + + #[wasm_bindgen(js_name = "getId")] + pub fn get_id(&self) -> String { + self.0.get_id() + } +} diff --git a/crates/wasm/fs/private/rng.rs b/crates/wasm/fs/private/rng.rs new file mode 100644 index 00000000..56a81d25 --- /dev/null +++ b/crates/wasm/fs/private/rng.rs @@ -0,0 +1,25 @@ +use wasm_bindgen::prelude::wasm_bindgen; +use wnfs::private::Rng as WnfsRng; + +//-------------------------------------------------------------------------------------------------- +// Externs +//-------------------------------------------------------------------------------------------------- + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(typescript_type = "Rng")] + pub type Rng; + + #[wasm_bindgen(method, js_name = "randomBytes")] + pub fn get_random_bytes(this: &Rng, count: usize) -> Vec; +} + +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + +impl WnfsRng for Rng { + fn random_bytes(&mut self) -> [u8; N] { + self.get_random_bytes(N).try_into().unwrap() + } +} diff --git a/crates/wasm/fs/public/directory.rs b/crates/wasm/fs/public/directory.rs index e806cf15..15ec5302 100644 --- a/crates/wasm/fs/public/directory.rs +++ b/crates/wasm/fs/public/directory.rs @@ -3,21 +3,29 @@ use std::rc::Rc; use chrono::{DateTime, Utc}; -use js_sys::{Array, Date, Error, Promise, Uint8Array}; +use js_sys::{Array, Date, Promise, Uint8Array}; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; use wasm_bindgen_futures::future_to_promise; use wnfs::{ ipld::Cid, public::{ - OpResult as WnfsOpResult, PublicDirectory as WnfsPublicDirectory, - PublicNode as WnfsPublicNode, + PublicDirectory as WnfsPublicDirectory, PublicNode as WnfsPublicNode, + PublicOpResult as WnfsPublicOpResult, }, BlockStore as WnfsBlockStore, Id, }; -use crate::fs::{metadata::JsMetadata, BlockStore, ForeignBlockStore, JsResult, PublicNode}; +use crate::fs::{ + metadata::JsMetadata, + utils::{self, error}, + BlockStore, ForeignBlockStore, JsResult, PublicNode, +}; use crate::value; +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + /// A directory in a WNFS public file system. 
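The `Rng` extern above lets JavaScript supply randomness: wnfs requests `N` bytes through a const-generic trait method, and the binding fulfils it by calling the JS object's `randomBytes` and converting the result into a fixed-size array. A self-contained sketch of that trait shape with a deterministic stand-in for the JS call:

```rust
// Const-generic RNG trait, as bridged in rng.rs; the counter replaces the JS call.
trait Rng {
    fn random_bytes<const N: usize>(&mut self) -> [u8; N];
}

struct CountingRng(u8);

impl Rng for CountingRng {
    fn random_bytes<const N: usize>(&mut self) -> [u8; N] {
        // A real binding would do `self.get_random_bytes(N).try_into().unwrap()`.
        let mut out = [0u8; N];
        for byte in out.iter_mut() {
            self.0 = self.0.wrapping_add(1);
            *byte = self.0;
        }
        out
    }
}

fn main() {
    let mut rng = CountingRng(0);
    let ratchet_seed: [u8; 32] = rng.random_bytes();
    let inumber: [u8; 32] = rng.random_bytes();
    assert_ne!(ratchet_seed, inumber);
}
```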
#[wasm_bindgen] pub struct PublicDirectory(pub(crate) Rc); @@ -43,12 +51,15 @@ impl PublicDirectory { let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, result } = directory + let WnfsPublicOpResult { root_dir, result } = directory .get_node(&path_segments, &store) .await - .map_err(|e| Error::new(&format!("Cannot get node: {e}")))?; + .map_err(error("Cannot get node"))?; - Ok(utils::create_op_result(root_dir, result.map(PublicNode))?) + Ok(utils::create_public_op_result( + root_dir, + result.map(PublicNode), + )?) })) } @@ -63,7 +74,7 @@ impl PublicDirectory { let found_node = directory .lookup_node(&path_segment, &store) .await - .map_err(|e| Error::new(&format!("Cannot lookup node: {e}")))?; + .map_err(error("Cannot lookup node"))?; Ok(value!(found_node.map(PublicNode))) })) @@ -78,7 +89,7 @@ impl PublicDirectory { let cid = directory .store(&mut store) .await - .map_err(|e| Error::new(&format!("Cannot add to store: {e}")))?; + .map_err(error("Cannot add to store"))?; let cid_u8array = Uint8Array::from(&cid.to_bytes()[..]); @@ -89,13 +100,13 @@ impl PublicDirectory { /// Loads a directory given its CID from the block store. pub fn load(cid: Vec, store: BlockStore) -> JsResult { let store = ForeignBlockStore(store); - let cid = - Cid::read_bytes(&cid[..]).map_err(|e| Error::new(&format!("Cannot parse cid: {e}")))?; + let cid = Cid::read_bytes(&cid[..]).map_err(error("Cannot parse cid"))?; + Ok(future_to_promise(async move { let directory: WnfsPublicDirectory = store .get_deserializable(&cid) .await - .map_err(|e| Error::new(&format!("Couldn't deserialize directory: {e}")))?; + .map_err(error("Couldn't deserialize directory"))?; Ok(value!(PublicDirectory(Rc::new(directory)))) })) @@ -108,35 +119,35 @@ impl PublicDirectory { let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, result } = directory + let WnfsPublicOpResult { root_dir, result } = directory .read(&path_segments, &mut store) .await - .map_err(|e| Error::new(&format!("Cannot read from directory: {e}")))?; + .map_err(error("Cannot read from directory"))?; - let cid_u8array = Uint8Array::from(&result.to_bytes()[..]); + let result = Uint8Array::from(&result.to_bytes()[..]); - Ok(utils::create_op_result(root_dir, cid_u8array)?) + Ok(utils::create_public_op_result(root_dir, result)?) })) } - /// Returns the name and metadata of the direct children of a directory. + /// Returns names and metadata of the direct children of a directory. pub fn ls(&self, path_segments: &Array, store: BlockStore) -> JsResult { let directory = Rc::clone(&self.0); let store = ForeignBlockStore(store); let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, result } = directory + let WnfsPublicOpResult { root_dir, result } = directory .ls(&path_segments, &store) .await - .map_err(|e| Error::new(&format!("Cannot list directory children: {e}")))?; + .map_err(error("Cannot list directory children"))?; let result = result .iter() .flat_map(|(name, metadata)| utils::create_ls_entry(name, metadata)) .collect::(); - Ok(utils::create_op_result(root_dir, result)?) + Ok(utils::create_public_op_result(root_dir, result)?) 
})) } @@ -147,15 +158,15 @@ impl PublicDirectory { let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { + let WnfsPublicOpResult { root_dir, result: node, } = directory .rm(&path_segments, &store) .await - .map_err(|e| Error::new(&format!("Cannot remove from directory: {e}")))?; + .map_err(error("Cannot remove from directory"))?; - Ok(utils::create_op_result(root_dir, PublicNode(node))?) + Ok(utils::create_public_op_result(root_dir, PublicNode(node))?) })) } @@ -170,18 +181,17 @@ impl PublicDirectory { let directory = Rc::clone(&self.0); let store = ForeignBlockStore(store); - let cid = - Cid::try_from(content_cid).map_err(|e| Error::new(&format!("Invalid CID: {e}")))?; + let cid = Cid::try_from(content_cid).map_err(error("Invalid CID"))?; let time = DateTime::::from(time); let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, .. } = directory + let WnfsPublicOpResult { root_dir, .. } = directory .write(&path_segments, cid, time, &store) .await - .map_err(|e| Error::new(&format!("Cannot write to directory: {e}")))?; + .map_err(error("Cannot write to directory"))?; - Ok(utils::create_op_result(root_dir, JsValue::NULL)?) + Ok(utils::create_public_op_result(root_dir, JsValue::NULL)?) })) } @@ -194,19 +204,19 @@ impl PublicDirectory { time: &Date, store: BlockStore, ) -> JsResult { - let directory = self.0.clone(); + let directory = Rc::clone(&self.0); let store = ForeignBlockStore(store); let time = DateTime::::from(time); let path_segments_from = utils::convert_path_segments(path_segments_from)?; let path_segments_to = utils::convert_path_segments(path_segments_to)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, .. } = directory + let WnfsPublicOpResult { root_dir, .. } = directory .basic_mv(&path_segments_from, &path_segments_to, time, &store) .await - .map_err(|e| Error::new(&format!("Cannot create directory: {e}")))?; + .map_err(error("Cannot create directory"))?; - Ok(utils::create_op_result(root_dir, JsValue::NULL)?) + Ok(utils::create_public_op_result(root_dir, JsValue::NULL)?) })) } @@ -219,36 +229,34 @@ impl PublicDirectory { time: &Date, store: BlockStore, ) -> JsResult { - let directory = self.0.clone(); + let directory = Rc::clone(&self.0); let store = ForeignBlockStore(store); let time = DateTime::::from(time); let path_segments = utils::convert_path_segments(path_segments)?; Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, .. } = directory + let WnfsPublicOpResult { root_dir, .. } = directory .mkdir(&path_segments, time, &store) .await - .map_err(|e| Error::new(&format!("Cannot create directory: {e}")))?; + .map_err(error("Cannot create directory"))?; - Ok(utils::create_op_result(root_dir, JsValue::NULL)?) + Ok(utils::create_public_op_result(root_dir, JsValue::NULL)?) })) } #[wasm_bindgen(js_name = "baseHistoryOn")] pub fn base_history_on(&self, base: &PublicDirectory, store: BlockStore) -> JsResult { - let directory = self.0.clone(); + let directory = Rc::clone(&self.0); let base = base.0.clone(); let mut store = ForeignBlockStore(store); Ok(future_to_promise(async move { - let WnfsOpResult { root_dir, .. } = directory + let WnfsPublicOpResult { root_dir, .. 
} = directory .base_history_on(base, &mut store) .await - .map_err(|e| { - Error::new(&format!("Cannot do history rebase (base_history_on): {e}")) - })?; + .map_err(error("Cannot do history rebase (base_history_on)"))?; - Ok(utils::create_op_result(root_dir, JsValue::NULL)?) + Ok(utils::create_public_op_result(root_dir, JsValue::NULL)?) })) } @@ -272,7 +280,7 @@ impl PublicDirectory { /// Converts directory to a node. #[wasm_bindgen(js_name = "asNode")] pub fn as_node(&self) -> PublicNode { - PublicNode(WnfsPublicNode::Dir(self.0.clone())) + PublicNode(WnfsPublicNode::Dir(Rc::clone(&self.0))) } /// Gets a unique id for node. @@ -281,62 +289,3 @@ impl PublicDirectory { self.0.get_id() } } - -//-------------------------------------------------------------------------------------------------- -// Utilities -//-------------------------------------------------------------------------------------------------- - -mod utils { - use std::rc::Rc; - - use crate::{fs::metadata::JsMetadata, fs::JsResult, value}; - use js_sys::{Array, Error, Object, Reflect}; - use wasm_bindgen::JsValue; - use wnfs::{public::PublicDirectory as WnfsPublicDirectory, Metadata}; - - use super::PublicDirectory; - - pub(crate) fn map_to_rust_vec JsResult>( - array: &Array, - f: F, - ) -> JsResult> { - array - .to_vec() - .into_iter() - .map(f) - .collect::>>() - } - - pub(crate) fn convert_path_segments(path_segments: &Array) -> JsResult> { - map_to_rust_vec(path_segments, |v| { - v.as_string() - .ok_or_else(|| Error::new("Invalid path segments: Expected an array of strings")) - }) - } - - pub(crate) fn create_op_result>( - root_dir: Rc, - result: T, - ) -> JsResult { - let op_result = Object::new(); - let root_dir = PublicDirectory(root_dir); - - Reflect::set(&op_result, &value!("rootDir"), &value!(root_dir))?; - Reflect::set(&op_result, &value!("result"), &result.into())?; - - Ok(value!(op_result)) - } - - pub(crate) fn create_ls_entry(name: &String, metadata: &Metadata) -> JsResult { - let entry = Object::new(); - - Reflect::set(&entry, &value!("name"), &value!(name))?; - Reflect::set( - &entry, - &value!("metadata"), - &JsMetadata(metadata).try_into()?, - )?; - - Ok(value!(entry)) - } -} diff --git a/crates/wasm/fs/public/file.rs b/crates/wasm/fs/public/file.rs index 3a495bcb..67f337a9 100644 --- a/crates/wasm/fs/public/file.rs +++ b/crates/wasm/fs/public/file.rs @@ -1,6 +1,5 @@ //! The bindgen API for PublicFile. -use crate::{fs::metadata::JsMetadata, value}; use chrono::{DateTime, Utc}; use js_sys::{Error, Promise, Uint8Array}; use std::rc::Rc; @@ -8,20 +7,30 @@ use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; use wasm_bindgen_futures::future_to_promise; use wnfs::{ipld::Cid, public::PublicFile as WnfsPublicFile, BlockStore as WnfsBlockStore, Id}; -use crate::fs::{BlockStore, ForeignBlockStore, JsResult}; +use crate::{ + fs::{metadata::JsMetadata, utils::error, BlockStore, ForeignBlockStore, JsResult}, + value, +}; + +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- /// A file in a WNFS public file system. 
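The repeated `map_err(error("..."))` calls above lean on the small helper added in utils.rs further down: it captures a context message and returns a closure for `map_err` that appends the `Debug` form of the underlying error. The same idea in miniature, with a plain `String` in place of `js_sys::Error`:

```rust
// Closure-returning error helper: `error("context")` is handed to `map_err`.
use std::fmt::Debug;

fn error<E: Debug>(message: &str) -> impl FnOnce(E) -> String + '_ {
    move |e| format!("{message}: {e:?}")
}

fn main() {
    let parsed: Result<u32, String> = "not-a-cid".parse::<u32>().map_err(error("Cannot parse cid"));
    assert!(parsed.unwrap_err().starts_with("Cannot parse cid"));
}
```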
#[wasm_bindgen] pub struct PublicFile(pub(crate) Rc); +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + #[wasm_bindgen] impl PublicFile { /// Creates a new file in a WNFS public file system. #[wasm_bindgen(constructor)] pub fn new(time: &js_sys::Date, cid: Vec) -> JsResult { let time = DateTime::::from(time); - - let cid = Cid::try_from(&cid[..]).map_err(|e| Error::new(&format!("Invalid CID: {e}")))?; + let cid = Cid::try_from(&cid[..]).map_err(error("Invalid CID"))?; Ok(PublicFile(Rc::new(WnfsPublicFile::new(time, cid)))) } diff --git a/crates/wasm/fs/public/node.rs b/crates/wasm/fs/public/node.rs index 4a073f3e..1a8584a9 100644 --- a/crates/wasm/fs/public/node.rs +++ b/crates/wasm/fs/public/node.rs @@ -4,10 +4,18 @@ use wnfs::{public::PublicNode as WnfsPublicNode, Id}; use crate::fs::{JsResult, PublicDirectory, PublicFile}; +//-------------------------------------------------------------------------------------------------- +// Type Definitions +//-------------------------------------------------------------------------------------------------- + /// Wraps a wnfs PublicNode. #[wasm_bindgen] pub struct PublicNode(pub(crate) WnfsPublicNode); +//-------------------------------------------------------------------------------------------------- +// Implementations +//-------------------------------------------------------------------------------------------------- + #[wasm_bindgen] impl PublicNode { #[wasm_bindgen(js_name = "asDir")] diff --git a/crates/wasm/fs/utils.rs b/crates/wasm/fs/utils.rs new file mode 100644 index 00000000..4c7642f6 --- /dev/null +++ b/crates/wasm/fs/utils.rs @@ -0,0 +1,88 @@ +use std::{fmt::Debug, rc::Rc}; + +use crate::{fs::JsResult, value}; +use js_sys::{Array, Error, Object, Reflect}; +use wasm_bindgen::JsValue; +use wnfs::{ + private::{PrivateDirectory as WnfsPrivateDirectory, PrivateForest as WnfsPrivateForest}, + public::PublicDirectory as WnfsPublicDirectory, + Metadata, +}; + +use super::{metadata::JsMetadata, PrivateDirectory, PrivateForest, PublicDirectory}; + +//-------------------------------------------------------------------------------------------------- +// Functions +//-------------------------------------------------------------------------------------------------- + +pub(crate) fn map_to_rust_vec JsResult>( + array: &Array, + f: F, +) -> JsResult> { + array + .to_vec() + .into_iter() + .map(f) + .collect::>>() +} + +pub(crate) fn convert_path_segments(path_segments: &Array) -> JsResult> { + map_to_rust_vec(path_segments, |v| { + v.as_string() + .ok_or_else(|| Error::new("Invalid path segments: Expected an array of strings")) + }) +} + +pub(crate) fn create_public_op_result>( + root_dir: Rc, + result: T, +) -> JsResult { + let op_result = Object::new(); + + Reflect::set( + &op_result, + &value!("rootDir"), + &PublicDirectory(root_dir).into(), + )?; + Reflect::set(&op_result, &value!("result"), &result.into())?; + + Ok(value!(op_result)) +} + +pub(crate) fn create_private_op_result>( + root_dir: Rc, + hamt: Rc, + result: T, +) -> JsResult { + let op_result = Object::new(); + + Reflect::set( + &op_result, + &value!("rootDir"), + &PrivateDirectory(root_dir).into(), + )?; + Reflect::set(&op_result, &value!("hamt"), &PrivateForest(hamt).into())?; + Reflect::set(&op_result, &value!("result"), &result.into())?; + + Ok(value!(op_result)) +} + +pub(crate) fn error(message: &str) -> 
diff --git a/crates/wasm/fs/utils.rs b/crates/wasm/fs/utils.rs
new file mode 100644
index 00000000..4c7642f6
--- /dev/null
+++ b/crates/wasm/fs/utils.rs
@@ -0,0 +1,88 @@
+use std::{fmt::Debug, rc::Rc};
+
+use crate::{fs::JsResult, value};
+use js_sys::{Array, Error, Object, Reflect};
+use wasm_bindgen::JsValue;
+use wnfs::{
+    private::{PrivateDirectory as WnfsPrivateDirectory, PrivateForest as WnfsPrivateForest},
+    public::PublicDirectory as WnfsPublicDirectory,
+    Metadata,
+};
+
+use super::{metadata::JsMetadata, PrivateDirectory, PrivateForest, PublicDirectory};
+
+//--------------------------------------------------------------------------------------------------
+// Functions
+//--------------------------------------------------------------------------------------------------
+
+pub(crate) fn map_to_rust_vec<T, F: FnMut(JsValue) -> JsResult<T>>(
+    array: &Array,
+    f: F,
+) -> JsResult<Vec<T>> {
+    array
+        .to_vec()
+        .into_iter()
+        .map(f)
+        .collect::<JsResult<Vec<T>>>()
+}
+
+pub(crate) fn convert_path_segments(path_segments: &Array) -> JsResult<Vec<String>> {
+    map_to_rust_vec(path_segments, |v| {
+        v.as_string()
+            .ok_or_else(|| Error::new("Invalid path segments: Expected an array of strings"))
+    })
+}
+
+pub(crate) fn create_public_op_result<T: Into<JsValue>>(
+    root_dir: Rc<WnfsPublicDirectory>,
+    result: T,
+) -> JsResult<JsValue> {
+    let op_result = Object::new();
+
+    Reflect::set(
+        &op_result,
+        &value!("rootDir"),
+        &PublicDirectory(root_dir).into(),
+    )?;
+    Reflect::set(&op_result, &value!("result"), &result.into())?;
+
+    Ok(value!(op_result))
+}
+
+pub(crate) fn create_private_op_result<T: Into<JsValue>>(
+    root_dir: Rc<WnfsPrivateDirectory>,
+    hamt: Rc<WnfsPrivateForest>,
+    result: T,
+) -> JsResult<JsValue> {
+    let op_result = Object::new();
+
+    Reflect::set(
+        &op_result,
+        &value!("rootDir"),
+        &PrivateDirectory(root_dir).into(),
+    )?;
+    Reflect::set(&op_result, &value!("hamt"), &PrivateForest(hamt).into())?;
+    Reflect::set(&op_result, &value!("result"), &result.into())?;
+
+    Ok(value!(op_result))
+}
+
+pub(crate) fn error<E>(message: &str) -> impl FnOnce(E) -> js_sys::Error + '_
+where
+    E: Debug,
+{
+    move |e| Error::new(&format!("{message}: {e:?}"))
+}
+
+pub(crate) fn create_ls_entry(name: &String, metadata: &Metadata) -> JsResult<JsValue> {
+    let entry = Object::new();
+
+    Reflect::set(&entry, &value!("name"), &value!(name))?;
+    Reflect::set(
+        &entry,
+        &value!("metadata"),
+        &JsMetadata(metadata).try_into()?,
+    )?;
+
+    Ok(value!(entry))
+}
diff --git a/crates/wasm/lib.rs b/crates/wasm/lib.rs
index 63a999fa..98ae89de 100644
--- a/crates/wasm/lib.rs
+++ b/crates/wasm/lib.rs
@@ -48,5 +48,5 @@ macro_rules! value {
 
 #[macro_export]
 macro_rules! console_log {
-    ($($t:tt)*) => (crate::log(&format_args!($($t)*).to_string()))
+    ($($t:tt)*) => ($crate::log(&format_args!($($t)*).to_string()))
 }
diff --git a/crates/wasm/tests/mock.ts b/crates/wasm/tests/mock.ts
index f253a2fb..fd0b44be 100644
--- a/crates/wasm/tests/mock.ts
+++ b/crates/wasm/tests/mock.ts
@@ -34,4 +34,14 @@
   }
 }
 
-export { sampleCID, MemoryBlockStore };
+/** A pseudo-random number generator */
+class Rng {
+  /** Returns random bytes of specified length */
+  randomBytes(count: number): Uint8Array {
+    const array = new Uint8Array(count);
+    self.crypto.getRandomValues(array);
+    return array;
+  }
+}
+
+export { sampleCID, MemoryBlockStore, Rng };
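The `Rng` mock draws from WebCrypto, so the random seeds it feeds into the private API differ on every run. The specs below only ever call `randomBytes(count)` on it, so a deterministic stand-in with the same shape could be swapped in when reproducible fixtures are wanted; the stub here is illustrative and not part of the patch.

```ts
// Hypothetical deterministic replacement for the Rng mock above: same
// randomBytes(count) interface, but bytes come from a rolling counter so
// every run produces identical output.
class SeededRng {
  private counter = 0;

  randomBytes(count: number): Uint8Array {
    const array = new Uint8Array(count);
    for (let i = 0; i < count; i++) {
      array[i] = this.counter++ % 256;
    }
    return array;
  }
}

// Used exactly like the WebCrypto-backed mock in the specs below, e.g. passed
// wherever they pass `rng` or call rng.randomBytes(32).
const rng = new SeededRng();
console.log(rng.randomBytes(4)); // Uint8Array(4) [0, 1, 2, 3]
```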
diff --git a/crates/wasm/tests/private.spec.ts b/crates/wasm/tests/private.spec.ts
new file mode 100644
index 00000000..19222408
--- /dev/null
+++ b/crates/wasm/tests/private.spec.ts
@@ -0,0 +1,183 @@
+///
+
+import { expect, test } from "@playwright/test";
+
+const url = "http://localhost:8085";
+
+test.beforeEach(async ({ page }) => {
+  await page.goto(url);
+});
+
+test.describe("PrivateDirectory", () => {
+  test("lookupNode can fetch file added to directory", async ({ page }) => {
+    const result = await page.evaluate(async () => {
+      const {
+        wnfs: { PrivateDirectory, PrivateForest, Namefilter },
+        mock: { MemoryBlockStore, Rng },
+      } = await window.setup();
+
+      const initialHamt = new PrivateForest();
+      const rng = new Rng();
+      const store = new MemoryBlockStore();
+      const root = new PrivateDirectory(
+        new Namefilter(),
+        rng.randomBytes(32),
+        rng.randomBytes(32),
+        new Date()
+      );
+
+      var { rootDir, hamt } = await root.write(["text.txt"], new Uint8Array([1, 2, 3, 4, 5]), new Date(), initialHamt, store, rng);
+
+      return await rootDir.lookupNode("text.txt", hamt, store);
+    });
+
+    expect(result).toBeDefined();
+  });
+
+  test("lookupNode cannot fetch file not added to directory", async ({
+    page,
+  }) => {
+    const result = await page.evaluate(async () => {
+      const {
+        wnfs: { PrivateDirectory, PrivateForest, Namefilter },
+        mock: { MemoryBlockStore, Rng },
+      } = await window.setup();
+
+      const initialHamt = new PrivateForest();
+      const rng = new Rng();
+      const store = new MemoryBlockStore();
+      const root = new PrivateDirectory(
+        new Namefilter(),
+        rng.randomBytes(32),
+        rng.randomBytes(32),
+        new Date()
+      );
+
+      return await root.lookupNode("Unknown", initialHamt, store);
+    });
+
+    expect(result).toBe(undefined);
+  });
+
+  test("mkdir can create new directory", async ({ page }) => {
+    const result = await page.evaluate(async () => {
+      const {
+        wnfs: { PrivateDirectory, PrivateForest, Namefilter },
+        mock: { MemoryBlockStore, Rng },
+      } = await window.setup();
+
+      const initialHamt = new PrivateForest();
+      const rng = new Rng();
+      const store = new MemoryBlockStore();
+      const root = new PrivateDirectory(
+        new Namefilter(),
+        rng.randomBytes(32),
+        rng.randomBytes(32),
+        new Date()
+      );
+
+      var { rootDir, hamt } = await root.mkdir(["pictures", "cats"], new Date(), initialHamt, store, rng);
+
+      var { rootDir, hamt } = await rootDir.write(
+        ["pictures", "cats", "tabby.png"],
+        new Uint8Array([1, 2, 3, 4, 5]),
+        new Date(),
+        hamt,
+        store,
+        rng,
+      );
+
+      var { rootDir } = await rootDir.getNode(
+        ["pictures", "cats", "tabby.png"],
+        hamt,
+        store
+      );
+
+      return rootDir;
+    });
+
+    expect(result).toBeDefined();
+  });
+
+  test("ls can list children under directory", async ({ page }) => {
+    const result = await page.evaluate(async () => {
+      const {
+        wnfs: { PrivateDirectory, PrivateForest, Namefilter },
+        mock: { MemoryBlockStore, Rng },
+      } = await window.setup();
+
+      const initialHamt = new PrivateForest();
+      const rng = new Rng();
+      const store = new MemoryBlockStore();
+      const root = new PrivateDirectory(
+        new Namefilter(),
+        rng.randomBytes(32),
+        rng.randomBytes(32),
+        new Date()
+      );
+
+      var { rootDir, hamt } = await root.mkdir(["pictures", "dogs"], new Date(), initialHamt, store, rng);
+
+      var { rootDir, hamt } = await rootDir.write(
+        ["pictures", "cats", "tabby.png"],
+        new Uint8Array([1, 2, 3, 4, 5]),
+        new Date(),
+        hamt,
+        store,
+        rng,
+      );
+
+      var { result } = await rootDir.ls(["pictures"], hamt, store);
+
+      return result;
+    });
+
+    expect(result.length).toBe(2);
+    expect(result[0].name).toBe("cats");
+    expect(result[1].name).toBe("dogs");
+  });
+
+  test("rm can remove children from directory", async ({ page }) => {
+    const result = await page.evaluate(async () => {
+      const {
+        wnfs: { PrivateDirectory, PrivateForest, Namefilter },
+        mock: { MemoryBlockStore, Rng },
+      } = await window.setup();
+
+      const initialHamt = new PrivateForest();
+      const rng = new Rng();
+      const store = new MemoryBlockStore();
+      const root = new PrivateDirectory(
+        new Namefilter(),
+        rng.randomBytes(32),
+        rng.randomBytes(32),
+        new Date()
+      );
+
+      var { rootDir, hamt } = await root.write(
+        ["pictures", "dogs", "billie.jpeg"],
+        new Uint8Array([1, 2, 3, 4, 5]),
+        new Date(),
+        initialHamt,
+        store,
+        rng,
+      );
+
+      var { rootDir, hamt } = await rootDir.write(
+        ["pictures", "cats", "tabby.png"],
+        new Uint8Array([1, 2, 3, 4, 5]),
+        new Date(),
+        hamt,
+        store,
+        rng,
+      );
+      var { rootDir, hamt } = await rootDir.rm(["pictures", "cats"], hamt, store, rng);
+      var { result } = await rootDir.ls(["pictures"], hamt, store);
+
+      return result;
+    });
+
+    expect(result.length).toEqual(1)
+    expect(result[0].name).toEqual("dogs");
+  });
+});
diff --git a/crates/wasm/tests/fs.spec.ts b/crates/wasm/tests/public.spec.ts
similarity index 100%
rename from crates/wasm/tests/fs.spec.ts
rename to crates/wasm/tests/public.spec.ts
import("../../pkg/index").PrivateNode; + Namefilter: typeof import("../../pkg/index").Namefilter; }; }>; } diff --git a/crates/wasm/tests/server/index.ts b/crates/wasm/tests/server/index.ts index 3ba7dd60..863b9394 100644 --- a/crates/wasm/tests/server/index.ts +++ b/crates/wasm/tests/server/index.ts @@ -1,12 +1,12 @@ /// -import { sampleCID, MemoryBlockStore } from "../mock"; +import { sampleCID, MemoryBlockStore, Rng } from "../mock"; const setup = async () => { - const { PublicDirectory } = await import("../../pkg/index"); + const { PublicDirectory, PublicFile, PublicNode, PrivateDirectory, PrivateForest, PrivateFile, PrivateNode, Namefilter } = await import("../../pkg/index"); - const mock = { sampleCID, MemoryBlockStore }; - const wnfs = { PublicDirectory }; + const mock = { sampleCID, MemoryBlockStore, Rng }; + const wnfs = { PublicDirectory, PublicFile, PublicNode, PrivateDirectory, PrivateForest, PrivateFile, PrivateNode, Namefilter }; return { mock, wnfs }; };