diff --git a/Cargo.lock b/Cargo.lock index a79891e8..894d29bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -639,7 +639,7 @@ dependencies = [ "bincode", "bytes", "casper-json-rpc", - "casper-types-ver-2_0", + "casper-types", "datasize", "futures", "http", @@ -658,7 +658,7 @@ dependencies = [ "tempfile", "thiserror", "tokio", - "toml 0.5.11", + "toml", "tower", "tracing", "tracing-subscriber", @@ -682,65 +682,21 @@ dependencies = [ "thiserror", "tikv-jemallocator", "tokio", - "toml 0.5.11", + "toml", "tracing", "tracing-subscriber", ] [[package]] name = "casper-types" -version = "4.0.1" -dependencies = [ - "base16", - "base64 0.13.1", - "bincode", - "bitflags 1.3.2", - "blake2", - "criterion", - "datasize", - "derp", - "ed25519-dalek", - "getrandom", - "hex", - "hex_fmt", - "humantime", - "k256", - "num", - "num-derive", - "num-integer", - "num-rational", - "num-traits", - "once_cell", - "openssl", - "pem", - "proptest", - "proptest-attr-macro", - "proptest-derive", - "rand", - "rand_pcg", - "schemars", - "serde", - "serde_bytes", - "serde_json", - "serde_test", - "strum 0.24.1", - "tempfile", - "thiserror", - "uint", - "untrusted 0.7.1", - "version-sync", -] - -[[package]] -name = "casper-types-ver-2_0" version = "3.0.0" +source = "git+https://github.com/jacek-casper/casper-node?branch=sidecar-extracted#95280b1644fb661daaa8dddb353cf9e938f65c9b" dependencies = [ "base16", "base64 0.13.1", "bincode", "bitflags 1.3.2", "blake2", - "criterion", "datasize", "derive_more", "derp", @@ -758,10 +714,8 @@ dependencies = [ "num-rational", "num-traits", "once_cell", - "openssl", "pem", "proptest", - "proptest-attr-macro", "proptest-derive", "rand", "rand_pcg", @@ -770,22 +724,13 @@ dependencies = [ "serde-map-to-array", "serde_bytes", "serde_json", - "serde_test", "strum 0.24.1", - "tempfile", "thiserror", "tracing", "uint", "untrusted 0.7.1", - "version-sync", ] -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" version = "1.0.83" @@ -956,64 +901,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" -dependencies = [ - "atty", - "cast", - "clap 2.34.0", - "criterion-plot", - "csv", - "itertools 0.10.5", - "lazy_static", - "num-traits", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_cbor", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" -dependencies = [ - "cast", - "itertools 0.10.5", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fca89a0e215bab21874660c67903c5f143333cab1da83d041c7ded6053774751" -dependencies = [ - "cfg-if", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e3681d554572a651dda4186cd47240627c3d0114d45a95f6ad27f2f22e7548d" -dependencies = [ - "autocfg", - "cfg-if", - "crossbeam-utils", -] - [[package]] name = "crossbeam-queue" version = "0.3.10" @@ -1071,27 +958,6 @@ dependencies = 
[ "subtle", ] -[[package]] -name = "csv" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" -dependencies = [ - "memchr", -] - [[package]] name = "ctor" version = "0.1.26" @@ -2220,12 +2086,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "half" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" - [[package]] name = "hashbrown" version = "0.12.3" @@ -3022,12 +2882,6 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" -[[package]] -name = "oorandom" -version = "11.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" - [[package]] name = "opaque-debug" version = "0.3.0" @@ -3299,34 +3153,6 @@ version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "626dec3cac7cc0e1577a2ec3fc496277ec2baa084bebad95bb6fdbfae235f84c" -[[package]] -name = "plotters" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" - -[[package]] -name = "plotters-svg" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" -dependencies = [ - "plotters-backend", -] - [[package]] name = "portpicker" version = "0.1.1" @@ -3468,17 +3294,6 @@ dependencies = [ "unarray", ] -[[package]] -name = "proptest-attr-macro" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa06db3abc95f048e0afa371db5569b24912bb98a8e2e2e89c75c5b43bc2aa8" -dependencies = [ - "proc-macro2 1.0.75", - "quote 1.0.35", - "syn 1.0.109", -] - [[package]] name = "proptest-derive" version = "0.3.0" @@ -3496,17 +3311,6 @@ version = "2.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" -[[package]] -name = "pulldown-cmark" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" -dependencies = [ - "bitflags 1.3.2", - "memchr", - "unicase", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -3579,26 +3383,6 @@ dependencies = [ "rand_core", ] -[[package]] -name = "rayon" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -4051,16 +3835,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - "serde", -] - [[package]] name = "serde_derive" version = "1.0.194" @@ -4095,24 +3869,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_spanned" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_test" -version = "1.0.176" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab" -dependencies = [ - "serde", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -4863,16 +4619,6 @@ dependencies = [ "time-core", ] -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] - [[package]] name = "tinyvec" version = "1.6.0" @@ -4986,40 +4732,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toml" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap 2.1.0", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - [[package]] name = "tower" version = "0.4.13" @@ -5348,21 +5060,6 @@ dependencies = [ "time", ] -[[package]] -name = "version-sync" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835169da0173ea373ddf5987632aac1f918967fbbe58195e304342282efa6089" -dependencies = [ - "proc-macro2 1.0.75", - "pulldown-cmark", - "regex", - "semver", - "syn 2.0.48", - "toml 0.7.8", - "url", -] - [[package]] name = "version_check" version = "0.9.4" diff --git a/Cargo.toml b/Cargo.toml index a6b57201..599ff78f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,8 +1,6 @@ [workspace] resolver = "1" members = [ - "casper_types", - "casper_types_ver_2_0", "event_sidecar", "json_rpc", "listener", @@ -14,8 +12,7 @@ members = [ [workspace.dependencies] anyhow = "1" async-stream = "0.3.4" -casper-types = { path = "./casper_types", version = "4.0.1" } -casper-types-ver-2_0 = { version = "3.0.0", path = "./casper_types_ver_2_0" } +casper-types = { git = "https://github.com/jacek-casper/casper-node", branch="sidecar-extracted" } casper-event-sidecar = { path = "./event_sidecar", version = "1.0.0" } casper-rpc-sidecar = { path = 
"./rpc_sidecar", version = "1.0.0" } datasize = "0.2.11" diff --git a/casper_types/CHANGELOG.md b/casper_types/CHANGELOG.md deleted file mode 100644 index 08b78b25..00000000 --- a/casper_types/CHANGELOG.md +++ /dev/null @@ -1,200 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog]. - -[comment]: <> (Added: new features) -[comment]: <> (Changed: changes in existing functionality) -[comment]: <> (Deprecated: soon-to-be removed features) -[comment]: <> (Removed: now removed features) -[comment]: <> (Fixed: any bug fixes) -[comment]: <> (Security: in case of vulnerabilities) - - - -## 4.0.1 - -### Added -* Add a new `SyncHandling` enum, which allows a node to opt out of historical sync. - -### Changed -* Update `k256` to version 0.13.1. - -### Removed -* Remove `ExecutionResult::successful_transfers`. - -### Security -* Update `ed25519-dalek` to version 2.0.0 as mitigation for [RUSTSEC-2022-0093](https://rustsec.org/advisories/RUSTSEC-2022-0093) - - - -## 3.0.0 - -### Added -* Add new `bytesrepr::Error::NotRepresentable` error variant that represents values that are not representable by the serialization format. -* Add new `Key::Unbond` key variant under which the new unbonding information (to support redelegation) is written. -* Add new `Key::ChainspecRegistry` key variant under which the `ChainspecRegistry` is written. -* Add new `Key::ChecksumRegistry` key variant under which a registry of checksums for a given block is written. There are two checksums in the registry, one for the execution results and the other for the approvals of all deploys in the block. -* Add new `StoredValue::Unbonding` variant to support redelegating. -* Add a new type `WithdrawPurses` which is meant to represent `UnbondingPurses` as they exist in current live networks. - -### Changed -* Extend `UnbondingPurse` to take a new field `new_validator` which represents the validator to whom tokens will be re-delegated. -* Increase `DICTIONARY_ITEM_KEY_MAX_LENGTH` to 128. -* Change prefix of formatted string representation of `ContractPackageHash` from "contract-package-wasm" to "contract-package-". Parsing from the old format is still supported. -* Apply `#[non_exhaustive]` to error enums. -* Change Debug output of `DeployHash` to hex-encoded string rather than a list of integers. - -### Fixed -* Fix some integer casts, where failure is now detected and reported via new error variant `NotRepresentable`. - - - -## 2.0.0 - -### Fixed -* Republish v1.6.0 as v2.0.0 due to missed breaking change in API (addition of new variant to `Key`). - - - -## 1.6.0 [YANKED] - -### Added -* Extend asymmetric key functionality, available via feature `std` (moved from `casper-nodes` crate). -* Provide `Timestamp` and `TimeDiff` types for time operations, with extended functionality available via feature `std` (moved from `casper-nodes` crate). -* Provide test-only functionality, in particular a seedable RNG `TestRng` which outputs its seed on test failure. Available via a new feature `testing`. -* Add new `Key::EraSummary` key variant under which the era summary info is written on each switch block execution. - -### Deprecated -* Deprecate `gens` feature: its functionality is included in the new `testing` feature. - - - -## 1.5.0 - -### Added -* Provide types and functionality to support improved access control inside execution engine. -* Provide `CLTyped` impl for `ContractPackage` to allow it to be passed into contracts. 
- -### Fixed -* Limit parsing of CLTyped objects to a maximum of 50 types deep. - - - -## 1.4.6 - 2021-12-29 - -### Changed -* Disable checksummed-hex encoding, but leave checksummed-hex decoding in place. - - - -## 1.4.5 - 2021-12-06 - -### Added -* Add function to `auction::MintProvider` trait to support minting into an existing purse. - -### Changed -* Change checksummed hex implementation to use 32 byte rather than 64 byte blake2b digests. - - - -## [1.4.4] - 2021-11-18 - -### Fixed -* Revert the accidental change to the `std` feature causing a broken build when this feature is enabled. - - - -## [1.4.3] - 2021-11-17 [YANKED] - - - -## [1.4.2] - 2021-11-13 [YANKED] - -### Added -* Add checksummed hex encoding following a scheme similar to [EIP-55](https://eips.ethereum.org/EIPS/eip-55). - - - -## [1.4.1] - 2021-10-23 - -No changes. - - - -## [1.4.0] - 2021-10-21 [YANKED] - -### Added -* Add `json-schema` feature, disabled by default, to enable many types to be used to produce JSON-schema data. -* Add implicit `datasize` feature, disabled by default, to enable many types to derive the `DataSize` trait. -* Add `StoredValue` types to this crate. - -### Changed -* Support building and testing using stable Rust. -* Allow longer hex string to be presented in `json` files. Current maximum is increased from 100 to 150 characters. -* Improve documentation and `Debug` impls for `ApiError`. - -### Deprecated -* Feature `std` is deprecated as it is now a no-op, since there is no benefit to linking the std lib via this crate. - - - -## [1.3.0] - 2021-07-19 - -### Changed -* Restrict summarization when JSON pretty-printing to contiguous long hex strings. -* Update pinned version of Rust to `nightly-2021-06-17`. - -### Removed -* Remove ability to clone `SecretKey`s. - - - -## [1.2.0] - 2021-05-27 - -### Changed -* Change to Apache 2.0 license. -* Return a `Result` from the constructor of `SecretKey` rather than potentially panicking. -* Improve `Key` error reporting and tests. - -### Fixed -* Fix `Key` deserialization. - - - -## [1.1.1] - 2021-04-19 - -No changes. - - - -## [1.1.0] - 2021-04-13 [YANKED] - -No changes. - - - -## [1.0.1] - 2021-04-08 - -No changes. - - - -## [1.0.0] - 2021-03-30 - -### Added -* Initial release of types for use by software compatible with Casper mainnet. 
- - - -[Keep a Changelog]: https://keepachangelog.com/en/1.0.0 -[unreleased]: https://github.com/casper-network/casper-node/compare/24fc4027a...dev -[1.4.3]: https://github.com/casper-network/casper-node/compare/2be27b3f5...24fc4027a -[1.4.2]: https://github.com/casper-network/casper-node/compare/v1.4.1...2be27b3f5 -[1.4.1]: https://github.com/casper-network/casper-node/compare/v1.4.0...v1.4.1 -[1.4.0]: https://github.com/casper-network/casper-node/compare/v1.3.0...v1.4.0 -[1.3.0]: https://github.com/casper-network/casper-node/compare/v1.2.0...v1.3.0 -[1.2.0]: https://github.com/casper-network/casper-node/compare/v1.1.1...v1.2.0 -[1.1.1]: https://github.com/casper-network/casper-node/compare/v1.0.1...v1.1.1 -[1.1.0]: https://github.com/casper-network/casper-node/compare/v1.0.1...v1.1.1 -[1.0.1]: https://github.com/casper-network/casper-node/compare/v1.0.0...v1.0.1 -[1.0.0]: https://github.com/casper-network/casper-node/releases/tag/v1.0.0 diff --git a/casper_types/Cargo.toml b/casper_types/Cargo.toml deleted file mode 100644 index 5f11687d..00000000 --- a/casper_types/Cargo.toml +++ /dev/null @@ -1,77 +0,0 @@ -[package] -name = "casper-types" -version = "4.0.1" # when updating, also update 'html_root_url' in lib.rs -authors = ["Fraser Hutchison "] -edition = "2021" -description = "Types shared by many casper crates for use on the Casper network." -readme = "README.md" -documentation = "https://docs.rs/casper-types" -homepage = "https://casperlabs.io" -repository = "https://github.com/CasperLabs/casper-node/tree/master/types" -license = "Apache-2.0" - -[dependencies] -base16 = { version = "0.2.1", default-features = false, features = ["alloc"] } -base64 = { version = "0.13.0", default-features = false } -bitflags = "1" -blake2 = { version = "0.9.0", default-features = false } -datasize = { workspace = true, optional = true } -derp = { version = "0.0.14", optional = true } -ed25519-dalek = { version = "2.0.0", default-features = false, features = ["alloc", "zeroize"] } -getrandom = { version = "0.2.0", features = ["rdrand"], optional = true } -hex = { version = "0.4.2", default-features = false, features = ["alloc"] } -hex_fmt = "0.3.0" -humantime = { version = "2", optional = true } -k256 = { version = "0.13.1", default-features = false, features = ["ecdsa", "sha256"] } -num = { version = "0.4.0", default-features = false, features = ["alloc"] } -num-derive = { version = "0.3.0", default-features = false } -num-integer = { version = "0.1.42", default-features = false } -num-rational = { version = "0.4.0", default-features = false } -num-traits = { version = "0.2.10", default-features = false } -once_cell = { workspace = true, optional = true } -pem = { version = "0.8.1", optional = true } -proptest = { version = "1.0.0", optional = true } -proptest-derive = { version = "0.3.0", optional = true } -rand = { version = "0.8.3", default-features = false, features = ["small_rng"] } -rand_pcg = { version = "0.3.0", optional = true } -schemars = { version = "=0.8.16", features = ["preserve_order"], optional = true } -serde = { workspace = true, default-features = false, features = ["alloc", "derive"] } -serde_bytes = { version = "0.11.5", default-features = false, features = ["alloc"] } -serde_json = { version = "1.0.59", default-features = false, features = ["alloc"] } -strum = { version = "0.24", features = ["derive"], optional = true } -thiserror = { workspace = true, optional = true } -uint = { version = "0.9.0", default-features = false } -untrusted = { version = "0.7.1", optional = true } 
-version-sync = { version = "0.9", optional = true } - -[dev-dependencies] -bincode = "1.3.1" -criterion = "0.3.5" -derp = "0.0.14" -getrandom = "0.2.0" -humantime = "2" -once_cell = {workspace = true} -openssl = "0.10.32" -pem = "0.8.1" -proptest = "1.0.0" -proptest-derive = "0.3.0" -proptest-attr-macro = "1.0.0" -rand = "0.8.3" -rand_pcg = "0.3.0" -serde_json = "1" -serde_test = "1" -strum = { version = "0.24", features = ["derive"] } -tempfile = "3.4.0" -thiserror = { workspace = true } -untrusted = "0.7.1" - -[features] -json-schema = ["once_cell", "schemars"] -std = ["derp", "getrandom/std", "humantime", "once_cell", "pem", "serde_json/preserve_order", "thiserror", "untrusted"] -testing = ["proptest", "proptest-derive", "rand_pcg", "strum"] -# DEPRECATED - use "testing" instead of "gens". -gens = ["testing"] - -[[bench]] -name = "bytesrepr_bench" -harness = false diff --git a/casper_types/README.md b/casper_types/README.md deleted file mode 100644 index 46f14ea2..00000000 --- a/casper_types/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# `casper-types` - -[![LOGO](https://raw.githubusercontent.com/casper-network/casper-node/master/images/casper-association-logo-primary.svg)](https://casper.network/) - -[![Build Status](https://drone-auto-casper-network.casperlabs.io/api/badges/casper-network/casper-node/status.svg?branch=dev)](http://drone-auto-casper-network.casperlabs.io/casper-network/casper-node) -[![Crates.io](https://img.shields.io/crates/v/casper-types)](https://crates.io/crates/casper-types) -[![Documentation](https://docs.rs/casper-types/badge.svg)](https://docs.rs/casper-types) -[![License](https://img.shields.io/badge/license-Apache-blue)](https://github.com/CasperLabs/casper-node/blob/master/LICENSE) - -Types shared by many casper crates for use on the Casper network. - -## `no_std` - -The crate is `no_std` (using the `core` and `alloc` crates) unless any of the following features are enabled: - -* `json-schema` to enable many types to be used to produce JSON-schema data via the [`schemars`](https://crates.io/crates/schemars) crate -* `datasize` to enable many types to derive the [`DataSize`](https://github.com/casperlabs/datasize-rs) trait -* `gens` to enable many types to be produced in accordance with [`proptest`](https://crates.io/crates/proptest) usage for consumption within dependee crates' property testing suites - -## License - -Licensed under the [Apache License Version 2.0](https://github.com/casper-network/casper-node/blob/master/LICENSE). 
diff --git a/casper_types/benches/bytesrepr_bench.rs b/casper_types/benches/bytesrepr_bench.rs deleted file mode 100644 index ac4e360e..00000000 --- a/casper_types/benches/bytesrepr_bench.rs +++ /dev/null @@ -1,894 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion}; - -use std::{ - collections::{BTreeMap, BTreeSet}, - iter, -}; - -use casper_types::{ - account::{Account, AccountHash, ActionThresholds, AssociatedKeys, Weight}, - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - contracts::{ContractPackageStatus, NamedKeys}, - system::auction::{Bid, Delegator, EraInfo, SeigniorageAllocation}, - AccessRights, CLType, CLTyped, CLValue, Contract, ContractHash, ContractPackage, - ContractPackageHash, ContractVersionKey, ContractWasmHash, DeployHash, DeployInfo, EntryPoint, - EntryPointAccess, EntryPointType, EntryPoints, Group, Key, Parameter, ProtocolVersion, - PublicKey, SecretKey, Transfer, TransferAddr, URef, KEY_HASH_LENGTH, TRANSFER_ADDR_LENGTH, - U128, U256, U512, UREF_ADDR_LENGTH, -}; - -static KB: usize = 1024; -static BATCH: usize = 4 * KB; - -const TEST_I32: i32 = 123_456_789; -const TEST_U128: U128 = U128([123_456_789, 0]); -const TEST_U256: U256 = U256([123_456_789, 0, 0, 0]); -const TEST_U512: U512 = U512([123_456_789, 0, 0, 0, 0, 0, 0, 0]); -const TEST_STR_1: &str = "String One"; -const TEST_STR_2: &str = "String Two"; - -fn prepare_vector(size: usize) -> Vec<i32> { - (0..size as i32).collect() -} - -fn serialize_vector_of_i32s(b: &mut Bencher) { - let data = prepare_vector(black_box(BATCH)); - b.iter(|| data.to_bytes()); -} - -fn deserialize_vector_of_i32s(b: &mut Bencher) { - let data = prepare_vector(black_box(BATCH)).to_bytes().unwrap(); - b.iter(|| { - let (res, _rem): (Vec<i32>, _) = FromBytes::from_bytes(&data).unwrap(); - res - }); -} - -fn serialize_vector_of_u8(b: &mut Bencher) { - // 0, 1, ... 254, 255, 0, 1, ... - let data: Bytes = prepare_vector(BATCH) - .into_iter() - .map(|value| value as u8) - .collect(); - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_vector_of_u8(b: &mut Bencher) { - // 0, 1, ... 254, 255, 0, 1, ...
- let data: Vec<u8> = prepare_vector(BATCH) - .into_iter() - .map(|value| value as u8) - .collect::<Bytes>() - .to_bytes() - .unwrap(); - b.iter(|| Bytes::from_bytes(black_box(&data))) -} - -fn serialize_u8(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&129u8))); -} - -fn deserialize_u8(b: &mut Bencher) { - b.iter(|| u8::from_bytes(black_box(&[129u8]))); -} - -fn serialize_i32(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&1_816_142_132i32))); -} - -fn deserialize_i32(b: &mut Bencher) { - b.iter(|| i32::from_bytes(black_box(&[0x34, 0x21, 0x40, 0x6c]))); -} - -fn serialize_u64(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&14_157_907_845_468_752_670u64))); -} - -fn deserialize_u64(b: &mut Bencher) { - b.iter(|| u64::from_bytes(black_box(&[0x1e, 0x8b, 0xe1, 0x73, 0x2c, 0xfe, 0x7a, 0xc4]))); -} - -fn serialize_some_u64(b: &mut Bencher) { - let data = Some(14_157_907_845_468_752_670u64); - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_some_u64(b: &mut Bencher) { - let data = Some(14_157_907_845_468_752_670u64); - let data = data.to_bytes().unwrap(); - - b.iter(|| Option::<u64>::from_bytes(&data)); -} - -fn serialize_none_u64(b: &mut Bencher) { - let data: Option<u64> = None; - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_ok_u64(b: &mut Bencher) { - let data: Option<u64> = None; - let data = data.to_bytes().unwrap(); - b.iter(|| Option::<u64>::from_bytes(&data)); -} - -fn make_test_vec_of_vec8() -> Vec<Bytes> { - (0..4) - .map(|_v| { - // 0, 1, 2, ..., 254, 255 - let inner_vec = iter::repeat_with(|| 0..255u8) - .flatten() - // 4 times to create 4x 1024 bytes - .take(4) - .collect::<Vec<u8>>(); - Bytes::from(inner_vec) - }) - .collect() -} - -fn serialize_vector_of_vector_of_u8(b: &mut Bencher) { - let data = make_test_vec_of_vec8(); - b.iter(|| data.to_bytes()); -} - -fn deserialize_vector_of_vector_of_u8(b: &mut Bencher) { - let data = make_test_vec_of_vec8().to_bytes().unwrap(); - b.iter(|| Vec::<Bytes>::from_bytes(black_box(&data))); -} - -fn serialize_tree_map(b: &mut Bencher) { - let data = { - let mut res = BTreeMap::new(); - res.insert("asdf".to_string(), "zxcv".to_string()); - res.insert("qwer".to_string(), "rewq".to_string()); - res.insert("1234".to_string(), "5678".to_string()); - res - }; - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_treemap(b: &mut Bencher) { - let data = { - let mut res = BTreeMap::new(); - res.insert("asdf".to_string(), "zxcv".to_string()); - res.insert("qwer".to_string(), "rewq".to_string()); - res.insert("1234".to_string(), "5678".to_string()); - res - }; - let data = data.to_bytes().unwrap(); - b.iter(|| BTreeMap::<String, String>::from_bytes(black_box(&data))); -} - -fn serialize_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; - let data = lorem.to_string(); - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; - let data = lorem.to_bytes().unwrap(); - b.iter(|| String::from_bytes(&data)); -} - -fn serialize_vec_of_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.".to_string(); - let array_of_lorem: Vec<String> = lorem.split(' ').map(Into::into).collect(); - let data = array_of_lorem; - b.iter(||
ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_vec_of_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.".to_string(); - let array_of_lorem: Vec<String> = lorem.split(' ').map(Into::into).collect(); - let data = array_of_lorem.to_bytes().unwrap(); - - b.iter(|| Vec::<String>::from_bytes(&data)); -} - -fn serialize_unit(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&()))) -} - -fn deserialize_unit(b: &mut Bencher) { - let data = ().to_bytes().unwrap(); - - b.iter(|| <()>::from_bytes(&data)) -} - -fn serialize_key_account(b: &mut Bencher) { - let account = Key::Account(AccountHash::new([0u8; 32])); - - b.iter(|| ToBytes::to_bytes(black_box(&account))) -} - -fn deserialize_key_account(b: &mut Bencher) { - let account = Key::Account(AccountHash::new([0u8; 32])); - let account_bytes = account.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&account_bytes))) -} - -fn serialize_key_hash(b: &mut Bencher) { - let hash = Key::Hash([0u8; 32]); - b.iter(|| ToBytes::to_bytes(black_box(&hash))) -} - -fn deserialize_key_hash(b: &mut Bencher) { - let hash = Key::Hash([0u8; 32]); - let hash_bytes = hash.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&hash_bytes))) -} - -fn serialize_key_uref(b: &mut Bencher) { - let uref = Key::URef(URef::new([0u8; 32], AccessRights::ADD_WRITE)); - b.iter(|| ToBytes::to_bytes(black_box(&uref))) -} - -fn deserialize_key_uref(b: &mut Bencher) { - let uref = Key::URef(URef::new([0u8; 32], AccessRights::ADD_WRITE)); - let uref_bytes = uref.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&uref_bytes))) -} - -fn serialize_vec_of_keys(b: &mut Bencher) { - let keys: Vec<Key> = (0..32) - .map(|i| Key::URef(URef::new([i; 32], AccessRights::ADD_WRITE))) - .collect(); - b.iter(|| ToBytes::to_bytes(black_box(&keys))) -} - -fn deserialize_vec_of_keys(b: &mut Bencher) { - let keys: Vec<Key> = (0..32) - .map(|i| Key::URef(URef::new([i; 32], AccessRights::ADD_WRITE))) - .collect(); - let keys_bytes = keys.to_bytes().unwrap(); - b.iter(|| Vec::<Key>::from_bytes(black_box(&keys_bytes))); -} - -fn serialize_access_rights_read(b: &mut Bencher) { - b.iter(|| AccessRights::READ.to_bytes()); -} - -fn deserialize_access_rights_read(b: &mut Bencher) { - let data = AccessRights::READ.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_write(b: &mut Bencher) { - b.iter(|| AccessRights::WRITE.to_bytes()); -} - -fn deserialize_access_rights_write(b: &mut Bencher) { - let data = AccessRights::WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_add(b: &mut Bencher) { - b.iter(|| AccessRights::ADD.to_bytes()); -} - -fn deserialize_access_rights_add(b: &mut Bencher) { - let data = AccessRights::ADD.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_read_add(b: &mut Bencher) { - b.iter(|| AccessRights::READ_ADD.to_bytes()); -} - -fn deserialize_access_rights_read_add(b: &mut Bencher) { - let data = AccessRights::READ_ADD.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_read_write(b: &mut Bencher) { - b.iter(|| AccessRights::READ_WRITE.to_bytes()); -} - -fn deserialize_access_rights_read_write(b: &mut Bencher) { - let data = AccessRights::READ_WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_add_write(b: &mut
Bencher) { - b.iter(|| AccessRights::ADD_WRITE.to_bytes()); -} - -fn deserialize_access_rights_add_write(b: &mut Bencher) { - let data = AccessRights::ADD_WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_cl_value<T: CLTyped + ToBytes>(raw_value: T) -> Vec<u8> { - CLValue::from_t(raw_value) - .expect("should create CLValue") - .to_bytes() - .expect("should serialize CLValue") -} - -fn benchmark_deserialization<T: CLTyped + ToBytes + FromBytes>(b: &mut Bencher, raw_value: T) { - let serialized_value = serialize_cl_value(raw_value); - b.iter(|| { - let cl_value: CLValue = bytesrepr::deserialize_from_slice(&serialized_value).unwrap(); - let _raw_value: T = cl_value.into_t().unwrap(); - }); -} - -fn serialize_cl_value_int32(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_I32)); -} - -fn deserialize_cl_value_int32(b: &mut Bencher) { - benchmark_deserialization(b, TEST_I32); -} - -fn serialize_cl_value_uint128(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U128)); -} - -fn deserialize_cl_value_uint128(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U128); -} - -fn serialize_cl_value_uint256(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U256)); -} - -fn deserialize_cl_value_uint256(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U256); -} - -fn serialize_cl_value_uint512(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U512)); -} - -fn deserialize_cl_value_uint512(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U512); -} - -fn serialize_cl_value_bytearray(b: &mut Bencher) { - b.iter_with_setup( - || { - let vec: Vec<u8> = (0..255).collect(); - Bytes::from(vec) - }, - serialize_cl_value, - ); -} - -fn deserialize_cl_value_bytearray(b: &mut Bencher) { - let vec = (0..255).collect::<Vec<u8>>(); - let bytes: Bytes = vec.into(); - benchmark_deserialization(b, bytes); -} - -fn serialize_cl_value_listint32(b: &mut Bencher) { - b.iter(|| serialize_cl_value((0..1024).collect::<Vec<i32>>())); -} - -fn deserialize_cl_value_listint32(b: &mut Bencher) { - benchmark_deserialization(b, (0..1024).collect::<Vec<i32>>()); -} - -fn serialize_cl_value_string(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_STR_1.to_string())); -} - -fn deserialize_cl_value_string(b: &mut Bencher) { - benchmark_deserialization(b, TEST_STR_1.to_string()); -} - -fn serialize_cl_value_liststring(b: &mut Bencher) { - b.iter(|| serialize_cl_value(vec![TEST_STR_1.to_string(), TEST_STR_2.to_string()])); -} - -fn deserialize_cl_value_liststring(b: &mut Bencher) { - benchmark_deserialization(b, vec![TEST_STR_1.to_string(), TEST_STR_2.to_string()]); -} - -fn serialize_cl_value_namedkey(b: &mut Bencher) { - b.iter(|| { - serialize_cl_value(( - TEST_STR_1.to_string(), - Key::Account(AccountHash::new([0xffu8; 32])), - )) - }); -} - -fn deserialize_cl_value_namedkey(b: &mut Bencher) { - benchmark_deserialization( - b, - ( - TEST_STR_1.to_string(), - Key::Account(AccountHash::new([0xffu8; 32])), - ), - ); -} - -fn serialize_u128(b: &mut Bencher) { - let num_u128 = U128::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u128))) -} - -fn deserialize_u128(b: &mut Bencher) { - let num_u128 = U128::default(); - let num_u128_bytes = num_u128.to_bytes().unwrap(); - - b.iter(|| U128::from_bytes(black_box(&num_u128_bytes))) -} - -fn serialize_u256(b: &mut Bencher) { - let num_u256 = U256::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u256))) -} - -fn deserialize_u256(b: &mut Bencher) { - let num_u256 = U256::default(); - let num_u256_bytes = num_u256.to_bytes().unwrap(); - - b.iter(|| U256::from_bytes(black_box(&num_u256_bytes))) -}
- -fn serialize_u512(b: &mut Bencher) { - let num_u512 = U512::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u512))) -} - -fn deserialize_u512(b: &mut Bencher) { - let num_u512 = U512::default(); - let num_u512_bytes = num_u512.to_bytes().unwrap(); - - b.iter(|| U512::from_bytes(black_box(&num_u512_bytes))) -} - -fn sample_account(associated_keys_len: u8, named_keys_len: u8) -> Account { - let account_hash = AccountHash::default(); - let named_keys: NamedKeys = sample_named_keys(named_keys_len); - let main_purse = URef::default(); - let associated_keys = { - let mut tmp = AssociatedKeys::new(AccountHash::default(), Weight::new(1)); - (1..associated_keys_len).for_each(|i| { - tmp.add_key( - AccountHash::new([i; casper_types::account::ACCOUNT_HASH_LENGTH]), - Weight::new(1), - ) - .unwrap() - }); - tmp - }; - let action_thresholds = ActionThresholds::default(); - Account::new( - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - ) -} - -fn serialize_account(b: &mut Bencher) { - let account = sample_account(10, 10); - b.iter(|| ToBytes::to_bytes(black_box(&account))); -} - -fn deserialize_account(b: &mut Bencher) { - let account = sample_account(10, 10); - let account_bytes = Account::to_bytes(&account).unwrap(); - b.iter(|| Account::from_bytes(black_box(&account_bytes)).unwrap()); -} - -fn serialize_contract(b: &mut Bencher) { - let contract = sample_contract(10, 10); - b.iter(|| ToBytes::to_bytes(black_box(&contract))); -} - -fn deserialize_contract(b: &mut Bencher) { - let contract = sample_contract(10, 10); - let contract_bytes = Contract::to_bytes(&contract).unwrap(); - b.iter(|| Contract::from_bytes(black_box(&contract_bytes)).unwrap()); -} - -fn sample_named_keys(len: u8) -> BTreeMap<String, Key> { - (0..len) - .map(|i| { - ( - format!("named-key-{}", i), - Key::Account(AccountHash::default()), - ) - }) - .collect() -} - -fn sample_contract(named_keys_len: u8, entry_points_len: u8) -> Contract { - let named_keys: NamedKeys = sample_named_keys(named_keys_len); - - let entry_points = { - let mut tmp = EntryPoints::default(); - (1..entry_points_len).for_each(|i| { - let args = vec![ - Parameter::new("first", CLType::U32), - Parameter::new("Foo", CLType::U32), - ]; - let entry_point = EntryPoint::new( - format!("test-{}", i), - args, - casper_types::CLType::U512, - EntryPointAccess::groups(&["Group 2"]), - EntryPointType::Contract, - ); - tmp.add_entry_point(entry_point); - }); - tmp - }; - - casper_types::contracts::Contract::new( - ContractPackageHash::default(), - ContractWasmHash::default(), - named_keys, - entry_points, - ProtocolVersion::default(), - ) -} - -fn contract_version_key_fn(i: u8) -> ContractVersionKey { - ContractVersionKey::new(i as u32, i as u32) -} - -fn contract_hash_fn(i: u8) -> ContractHash { - ContractHash::new([i; KEY_HASH_LENGTH]) -} - -fn sample_map<K: Ord, V, FK, FV>(key_fn: FK, value_fn: FV, count: u8) -> BTreeMap<K, V> -where - FK: Fn(u8) -> K, - FV: Fn(u8) -> V, -{ - (0..count) - .map(|i| { - let key = key_fn(i); - let value = value_fn(i); - (key, value) - }) - .collect() -} - -fn sample_set<K: Ord, F>(fun: F, count: u8) -> BTreeSet<K> -where - F: Fn(u8) -> K, -{ - (0..count).map(fun).collect() -} - -fn sample_group(i: u8) -> Group { - Group::new(format!("group-{}", i)) -} - -fn sample_uref(i: u8) -> URef { - URef::new([i; UREF_ADDR_LENGTH], AccessRights::all()) -} - -fn sample_contract_package( - contract_versions_len: u8, - disabled_versions_len: u8, - groups_len: u8, -) -> ContractPackage { - let access_key = URef::default(); - let versions = sample_map(
contract_version_key_fn, - contract_hash_fn, - contract_versions_len, - ); - let disabled_versions = sample_set(contract_version_key_fn, disabled_versions_len); - let groups = sample_map(sample_group, |_| sample_set(sample_uref, 3), groups_len); - - ContractPackage::new( - access_key, - versions, - disabled_versions, - groups, - ContractPackageStatus::Locked, - ) -} - -fn serialize_contract_package(b: &mut Bencher) { - let contract = sample_contract_package(5, 1, 5); - b.iter(|| ContractPackage::to_bytes(black_box(&contract))); -} - -fn deserialize_contract_package(b: &mut Bencher) { - let contract_package = sample_contract_package(5, 1, 5); - let contract_bytes = ContractPackage::to_bytes(&contract_package).unwrap(); - b.iter(|| ContractPackage::from_bytes(black_box(&contract_bytes)).unwrap()); -} - -fn u32_to_pk(i: u32) -> PublicKey { - let mut sk_bytes = [0u8; 32]; - U256::from(i).to_big_endian(&mut sk_bytes); - let sk = SecretKey::ed25519_from_bytes(sk_bytes).unwrap(); - PublicKey::from(&sk) -} - -fn sample_delegators(delegators_len: u32) -> Vec<Delegator> { - (0..delegators_len) - .map(|i| { - let delegator_pk = u32_to_pk(i); - let staked_amount = U512::from_dec_str("123123123123123").unwrap(); - let bonding_purse = URef::default(); - let validator_pk = u32_to_pk(i); - Delegator::unlocked(delegator_pk, staked_amount, bonding_purse, validator_pk) - }) - .collect() -} - -fn sample_bid(delegators_len: u32) -> Bid { - let validator_public_key = PublicKey::System; - let bonding_purse = URef::default(); - let staked_amount = U512::from_dec_str("123123123123123").unwrap(); - let delegation_rate = 10u8; - let mut bid = Bid::unlocked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - ); - let new_delegators = sample_delegators(delegators_len); - - let curr_delegators = bid.delegators_mut(); - for delegator in new_delegators.into_iter() { - assert!(curr_delegators - .insert(delegator.delegator_public_key().clone(), delegator) - .is_none()); - } - bid -} - -fn serialize_bid(delegators_len: u32, b: &mut Bencher) { - let bid = sample_bid(delegators_len); - b.iter(|| Bid::to_bytes(black_box(&bid))); -} - -fn deserialize_bid(delegators_len: u32, b: &mut Bencher) { - let bid = sample_bid(delegators_len); - let bid_bytes = Bid::to_bytes(&bid).unwrap(); - b.iter(|| Bid::from_bytes(black_box(&bid_bytes))); -} - -fn sample_transfer() -> Transfer { - Transfer::new( - DeployHash::default(), - AccountHash::default(), - None, - URef::default(), - URef::default(), - U512::MAX, - U512::from_dec_str("123123123123").unwrap(), - Some(1u64), - ) -} - -fn serialize_transfer(b: &mut Bencher) { - let transfer = sample_transfer(); - b.iter(|| Transfer::to_bytes(&transfer)); -} - -fn deserialize_transfer(b: &mut Bencher) { - let transfer = sample_transfer(); - let transfer_bytes = transfer.to_bytes().unwrap(); - b.iter(|| Transfer::from_bytes(&transfer_bytes)); -} - -fn sample_deploy_info(transfer_len: u16) -> DeployInfo { - let transfers = (0..transfer_len) - .map(|i| { - let mut tmp = [0u8; TRANSFER_ADDR_LENGTH]; - U256::from(i).to_little_endian(&mut tmp); - TransferAddr::new(tmp) - }) - .collect::<Vec<TransferAddr>>(); - DeployInfo::new( - DeployHash::default(), - &transfers, - AccountHash::default(), - URef::default(), - U512::MAX, - ) -} - -fn serialize_deploy_info(b: &mut Bencher) { - let deploy_info = sample_deploy_info(1000); - b.iter(|| DeployInfo::to_bytes(&deploy_info)); -} - -fn deserialize_deploy_info(b: &mut Bencher) { - let deploy_info = sample_deploy_info(1000); - let deploy_bytes =
deploy_info.to_bytes().unwrap(); - b.iter(|| DeployInfo::from_bytes(&deploy_bytes)); -} - -fn sample_era_info(delegators_len: u32) -> EraInfo { - let mut base = EraInfo::new(); - let delegations = (0..delegators_len).map(|i| { - let pk = u32_to_pk(i); - SeigniorageAllocation::delegator(pk.clone(), pk, U512::MAX) - }); - base.seigniorage_allocations_mut().extend(delegations); - base -} - -fn serialize_era_info(delegators_len: u32, b: &mut Bencher) { - let era_info = sample_era_info(delegators_len); - b.iter(|| EraInfo::to_bytes(&era_info)); -} - -fn deserialize_era_info(delegators_len: u32, b: &mut Bencher) { - let era_info = sample_era_info(delegators_len); - let era_info_bytes = era_info.to_bytes().unwrap(); - b.iter(|| EraInfo::from_bytes(&era_info_bytes)); -} - -fn bytesrepr_bench(c: &mut Criterion) { - c.bench_function("serialize_vector_of_i32s", serialize_vector_of_i32s); - c.bench_function("deserialize_vector_of_i32s", deserialize_vector_of_i32s); - c.bench_function("serialize_vector_of_u8", serialize_vector_of_u8); - c.bench_function("deserialize_vector_of_u8", deserialize_vector_of_u8); - c.bench_function("serialize_u8", serialize_u8); - c.bench_function("deserialize_u8", deserialize_u8); - c.bench_function("serialize_i32", serialize_i32); - c.bench_function("deserialize_i32", deserialize_i32); - c.bench_function("serialize_u64", serialize_u64); - c.bench_function("deserialize_u64", deserialize_u64); - c.bench_function("serialize_some_u64", serialize_some_u64); - c.bench_function("deserialize_some_u64", deserialize_some_u64); - c.bench_function("serialize_none_u64", serialize_none_u64); - c.bench_function("deserialize_ok_u64", deserialize_ok_u64); - c.bench_function( - "serialize_vector_of_vector_of_u8", - serialize_vector_of_vector_of_u8, - ); - c.bench_function( - "deserialize_vector_of_vector_of_u8", - deserialize_vector_of_vector_of_u8, - ); - c.bench_function("serialize_tree_map", serialize_tree_map); - c.bench_function("deserialize_treemap", deserialize_treemap); - c.bench_function("serialize_string", serialize_string); - c.bench_function("deserialize_string", deserialize_string); - c.bench_function("serialize_vec_of_string", serialize_vec_of_string); - c.bench_function("deserialize_vec_of_string", deserialize_vec_of_string); - c.bench_function("serialize_unit", serialize_unit); - c.bench_function("deserialize_unit", deserialize_unit); - c.bench_function("serialize_key_account", serialize_key_account); - c.bench_function("deserialize_key_account", deserialize_key_account); - c.bench_function("serialize_key_hash", serialize_key_hash); - c.bench_function("deserialize_key_hash", deserialize_key_hash); - c.bench_function("serialize_key_uref", serialize_key_uref); - c.bench_function("deserialize_key_uref", deserialize_key_uref); - c.bench_function("serialize_vec_of_keys", serialize_vec_of_keys); - c.bench_function("deserialize_vec_of_keys", deserialize_vec_of_keys); - c.bench_function("serialize_access_rights_read", serialize_access_rights_read); - c.bench_function( - "deserialize_access_rights_read", - deserialize_access_rights_read, - ); - c.bench_function( - "serialize_access_rights_write", - serialize_access_rights_write, - ); - c.bench_function( - "deserialize_access_rights_write", - deserialize_access_rights_write, - ); - c.bench_function("serialize_access_rights_add", serialize_access_rights_add); - c.bench_function( - "deserialize_access_rights_add", - deserialize_access_rights_add, - ); - c.bench_function( - "serialize_access_rights_read_add", - 
serialize_access_rights_read_add, - ); - c.bench_function( - "deserialize_access_rights_read_add", - deserialize_access_rights_read_add, - ); - c.bench_function( - "serialize_access_rights_read_write", - serialize_access_rights_read_write, - ); - c.bench_function( - "deserialize_access_rights_read_write", - deserialize_access_rights_read_write, - ); - c.bench_function( - "serialize_access_rights_add_write", - serialize_access_rights_add_write, - ); - c.bench_function( - "deserialize_access_rights_add_write", - deserialize_access_rights_add_write, - ); - c.bench_function("serialize_cl_value_int32", serialize_cl_value_int32); - c.bench_function("deserialize_cl_value_int32", deserialize_cl_value_int32); - c.bench_function("serialize_cl_value_uint128", serialize_cl_value_uint128); - c.bench_function("deserialize_cl_value_uint128", deserialize_cl_value_uint128); - c.bench_function("serialize_cl_value_uint256", serialize_cl_value_uint256); - c.bench_function("deserialize_cl_value_uint256", deserialize_cl_value_uint256); - c.bench_function("serialize_cl_value_uint512", serialize_cl_value_uint512); - c.bench_function("deserialize_cl_value_uint512", deserialize_cl_value_uint512); - c.bench_function("serialize_cl_value_bytearray", serialize_cl_value_bytearray); - c.bench_function( - "deserialize_cl_value_bytearray", - deserialize_cl_value_bytearray, - ); - c.bench_function("serialize_cl_value_listint32", serialize_cl_value_listint32); - c.bench_function( - "deserialize_cl_value_listint32", - deserialize_cl_value_listint32, - ); - c.bench_function("serialize_cl_value_string", serialize_cl_value_string); - c.bench_function("deserialize_cl_value_string", deserialize_cl_value_string); - c.bench_function( - "serialize_cl_value_liststring", - serialize_cl_value_liststring, - ); - c.bench_function( - "deserialize_cl_value_liststring", - deserialize_cl_value_liststring, - ); - c.bench_function("serialize_cl_value_namedkey", serialize_cl_value_namedkey); - c.bench_function( - "deserialize_cl_value_namedkey", - deserialize_cl_value_namedkey, - ); - c.bench_function("serialize_u128", serialize_u128); - c.bench_function("deserialize_u128", deserialize_u128); - c.bench_function("serialize_u256", serialize_u256); - c.bench_function("deserialize_u256", deserialize_u256); - c.bench_function("serialize_u512", serialize_u512); - c.bench_function("deserialize_u512", deserialize_u512); - c.bench_function("bytesrepr::serialize_account", serialize_account); - c.bench_function("bytesrepr::deserialize_account", deserialize_account); - c.bench_function("bytesrepr::serialize_contract", serialize_contract); - c.bench_function("bytesrepr::deserialize_contract", deserialize_contract); - c.bench_function( - "bytesrepr::serialize_contract_package", - serialize_contract_package, - ); - c.bench_function( - "bytesrepr::deserialize_contract_package", - deserialize_contract_package, - ); - c.bench_function("bytesrepr::serialize_bid_small", |b| serialize_bid(10, b)); - c.bench_function("bytesrepr::serialize_bid_medium", |b| serialize_bid(100, b)); - c.bench_function("bytesrepr::serialize_bid_big", |b| serialize_bid(1000, b)); - c.bench_function("bytesrepr::deserialize_bid_small", |b| { - deserialize_bid(10, b) - }); - c.bench_function("bytesrepr::deserialize_bid_medium", |b| { - deserialize_bid(100, b) - }); - c.bench_function("bytesrepr::deserialize_bid_big", |b| { - deserialize_bid(1000, b) - }); - c.bench_function("bytesrepr::serialize_transfer", serialize_transfer); - c.bench_function("bytesrepr::deserialize_transfer", 
deserialize_transfer); - c.bench_function("bytesrepr::serialize_deploy_info", serialize_deploy_info); - c.bench_function( - "bytesrepr::deserialize_deploy_info", - deserialize_deploy_info, - ); - c.bench_function("bytesrepr::serialize_era_info", |b| { - serialize_era_info(500, b) - }); - c.bench_function("bytesrepr::deserialize_era_info", |b| { - deserialize_era_info(500, b) - }); -} - -criterion_group!(benches, bytesrepr_bench); -criterion_main!(benches); diff --git a/casper_types/src/access_rights.rs b/casper_types/src/access_rights.rs deleted file mode 100644 index e138f2f4..00000000 --- a/casper_types/src/access_rights.rs +++ /dev/null @@ -1,422 +0,0 @@ -use alloc::{ - collections::{btree_map::Entry, BTreeMap}, - vec::Vec, -}; -use core::fmt::{self, Display, Formatter}; - -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{bytesrepr, Key, URef, URefAddr}; -pub use private::AccessRights; - -/// The number of bytes in a serialized [`AccessRights`]. -pub const ACCESS_RIGHTS_SERIALIZED_LENGTH: usize = 1; - -// Module exists only to restrict the scope of the following `#allow`. -#[allow(clippy::bad_bit_mask)] -mod private { - use bitflags::bitflags; - #[cfg(feature = "datasize")] - use datasize::DataSize; - - bitflags! { - /// A struct which behaves like a set of bitflags to define access rights associated with a - /// [`URef`](crate::URef). - #[allow(clippy::derived_hash_with_manual_eq)] - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct AccessRights: u8 { - /// No permissions - const NONE = 0; - /// Permission to read the value under the associated `URef`. - const READ = 0b001; - /// Permission to write a value under the associated `URef`. - const WRITE = 0b010; - /// Permission to add to the value under the associated `URef`. - const ADD = 0b100; - /// Permission to read or add to the value under the associated `URef`. - const READ_ADD = Self::READ.bits | Self::ADD.bits; - /// Permission to read or write the value under the associated `URef`. - const READ_WRITE = Self::READ.bits | Self::WRITE.bits; - /// Permission to add to, or write the value under the associated `URef`. - const ADD_WRITE = Self::ADD.bits | Self::WRITE.bits; - /// Permission to read, add to, or write the value under the associated `URef`. - const READ_ADD_WRITE = Self::READ.bits | Self::ADD.bits | Self::WRITE.bits; - } - } -} - -impl Default for AccessRights { - fn default() -> Self { - AccessRights::NONE - } -} - -impl AccessRights { - /// Returns `true` if the `READ` flag is set. - pub fn is_readable(self) -> bool { - self & AccessRights::READ == AccessRights::READ - } - - /// Returns `true` if the `WRITE` flag is set. - pub fn is_writeable(self) -> bool { - self & AccessRights::WRITE == AccessRights::WRITE - } - - /// Returns `true` if the `ADD` flag is set. - pub fn is_addable(self) -> bool { - self & AccessRights::ADD == AccessRights::ADD - } - - /// Returns `true` if no flags are set. 
- pub fn is_none(self) -> bool { - self == AccessRights::NONE - } -} - -impl Display for AccessRights { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - AccessRights::NONE => write!(f, "NONE"), - AccessRights::READ => write!(f, "READ"), - AccessRights::WRITE => write!(f, "WRITE"), - AccessRights::ADD => write!(f, "ADD"), - AccessRights::READ_ADD => write!(f, "READ_ADD"), - AccessRights::READ_WRITE => write!(f, "READ_WRITE"), - AccessRights::ADD_WRITE => write!(f, "ADD_WRITE"), - AccessRights::READ_ADD_WRITE => write!(f, "READ_ADD_WRITE"), - _ => write!(f, "UNKNOWN"), - } - } -} - -impl bytesrepr::ToBytes for AccessRights { - fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> { - self.bits().to_bytes() - } - - fn serialized_length(&self) -> usize { - ACCESS_RIGHTS_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> { - writer.push(self.bits()); - Ok(()) - } -} - -impl bytesrepr::FromBytes for AccessRights { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (id, rem) = u8::from_bytes(bytes)?; - match AccessRights::from_bits(id) { - Some(rights) => Ok((rights, rem)), - None => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for AccessRights { - fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { - self.bits().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for AccessRights { - fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { - let bits = u8::deserialize(deserializer)?; - AccessRights::from_bits(bits).ok_or_else(|| SerdeError::custom("invalid bits")) - } -} - -impl Distribution<AccessRights> for Standard { - fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> AccessRights { - let mut result = AccessRights::NONE; - if rng.gen() { - result |= AccessRights::READ; - } - if rng.gen() { - result |= AccessRights::WRITE; - } - if rng.gen() { - result |= AccessRights::ADD; - } - result - } -} - -/// Used to indicate if a granted [`URef`] was already held by the context. -#[derive(Debug, PartialEq, Eq)] -pub enum GrantedAccess { - /// No new set of access rights were granted. - PreExisting, - /// A new set of access rights were granted. - Granted { - /// The address of the URef. - uref_addr: URefAddr, - /// The set of the newly granted access rights. - newly_granted_access_rights: AccessRights, - }, -} - -/// Access rights for a given runtime context. -#[derive(Debug, PartialEq, Eq)] -pub struct ContextAccessRights { - context_key: Key, - access_rights: BTreeMap<URefAddr, AccessRights>, -} - -impl ContextAccessRights { - /// Creates a new instance of access rights from an iterator of URefs merging any duplicates, - /// taking the union of their rights. - pub fn new<T: IntoIterator<Item = URef>>(context_key: Key, uref_iter: T) -> Self { - let mut context_access_rights = ContextAccessRights { - context_key, - access_rights: BTreeMap::new(), - }; - context_access_rights.do_extend(uref_iter); - context_access_rights - } - - /// Returns the current context key. - pub fn context_key(&self) -> Key { - self.context_key - } - - /// Extends the current access rights from a given set of URefs. - pub fn extend(&mut self, urefs: &[URef]) { - self.do_extend(urefs.iter().copied()) - } - - /// Extends the current access rights from a given set of URefs.
- fn do_extend<T: IntoIterator<Item = URef>>(&mut self, uref_iter: T) { - for uref in uref_iter { - match self.access_rights.entry(uref.addr()) { - Entry::Occupied(rights) => { - *rights.into_mut() = rights.get().union(uref.access_rights()); - } - Entry::Vacant(rights) => { - rights.insert(uref.access_rights()); - } - } - } - } - - /// Checks whether given uref has enough access rights. - pub fn has_access_rights_to_uref(&self, uref: &URef) -> bool { - if let Some(known_rights) = self.access_rights.get(&uref.addr()) { - let rights_to_check = uref.access_rights(); - known_rights.contains(rights_to_check) - } else { - // URef is not known - false - } - } - - /// Grants access to a [`URef`]; unless access was pre-existing. - pub fn grant_access(&mut self, uref: URef) -> GrantedAccess { - match self.access_rights.entry(uref.addr()) { - Entry::Occupied(existing_rights) => { - let newly_granted_access_rights = - uref.access_rights().difference(*existing_rights.get()); - *existing_rights.into_mut() = existing_rights.get().union(uref.access_rights()); - if newly_granted_access_rights.is_none() { - GrantedAccess::PreExisting - } else { - GrantedAccess::Granted { - uref_addr: uref.addr(), - newly_granted_access_rights, - } - } - } - Entry::Vacant(rights) => { - rights.insert(uref.access_rights()); - GrantedAccess::Granted { - uref_addr: uref.addr(), - newly_granted_access_rights: uref.access_rights(), - } - } - } - } - - /// Remove access for a given `URef`. - pub fn remove_access(&mut self, uref_addr: URefAddr, access_rights: AccessRights) { - if let Some(current_access_rights) = self.access_rights.get_mut(&uref_addr) { - current_access_rights.remove(access_rights) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::UREF_ADDR_LENGTH; - - const UREF_ADDRESS: [u8; UREF_ADDR_LENGTH] = [1; UREF_ADDR_LENGTH]; - const KEY: Key = Key::URef(URef::new(UREF_ADDRESS, AccessRights::empty())); - const UREF_NO_PERMISSIONS: URef = URef::new(UREF_ADDRESS, AccessRights::empty()); - const UREF_READ: URef = URef::new(UREF_ADDRESS, AccessRights::READ); - const UREF_ADD: URef = URef::new(UREF_ADDRESS, AccessRights::ADD); - const UREF_WRITE: URef = URef::new(UREF_ADDRESS, AccessRights::WRITE); - const UREF_READ_ADD: URef = URef::new(UREF_ADDRESS, AccessRights::READ_ADD); - const UREF_READ_ADD_WRITE: URef = URef::new(UREF_ADDRESS, AccessRights::READ_ADD_WRITE); - - fn test_readable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_readable(), is_true) - } - - #[test] - fn test_is_readable() { - test_readable(AccessRights::READ, true); - test_readable(AccessRights::READ_ADD, true); - test_readable(AccessRights::READ_WRITE, true); - test_readable(AccessRights::READ_ADD_WRITE, true); - test_readable(AccessRights::ADD, false); - test_readable(AccessRights::ADD_WRITE, false); - test_readable(AccessRights::WRITE, false); - } - - fn test_writable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_writeable(), is_true) - } - - #[test] - fn test_is_writable() { - test_writable(AccessRights::WRITE, true); - test_writable(AccessRights::READ_WRITE, true); - test_writable(AccessRights::ADD_WRITE, true); - test_writable(AccessRights::READ, false); - test_writable(AccessRights::ADD, false); - test_writable(AccessRights::READ_ADD, false); - test_writable(AccessRights::READ_ADD_WRITE, true); - } - - fn test_addable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_addable(), is_true) - } - - #[test] - fn test_is_addable() { - test_addable(AccessRights::ADD, true); - test_addable(AccessRights::READ_ADD, true); -
test_addable(AccessRights::READ_WRITE, false); - test_addable(AccessRights::ADD_WRITE, true); - test_addable(AccessRights::READ, false); - test_addable(AccessRights::WRITE, false); - test_addable(AccessRights::READ_ADD_WRITE, true); - } - - #[test] - fn should_check_has_access_rights_to_uref() { - let context_rights = ContextAccessRights::new(KEY, vec![UREF_READ_ADD]); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD)); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ)); - assert!(context_rights.has_access_rights_to_uref(&UREF_ADD)); - assert!(context_rights.has_access_rights_to_uref(&UREF_NO_PERMISSIONS)); - } - - #[test] - fn should_check_does_not_have_access_rights_to_uref() { - let context_rights = ContextAccessRights::new(KEY, vec![UREF_READ_ADD]); - assert!(!context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - assert!(!context_rights - .has_access_rights_to_uref(&URef::new([2; UREF_ADDR_LENGTH], AccessRights::empty()))); - } - - #[test] - fn should_extend_access_rights() { - // Start with uref with no permissions. - let mut context_rights = ContextAccessRights::new(KEY, vec![UREF_NO_PERMISSIONS]); - let mut expected_rights = BTreeMap::new(); - expected_rights.insert(UREF_ADDRESS, AccessRights::empty()); - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a READ_ADD: should merge to single READ_ADD. - context_rights.extend(&[UREF_READ_ADD]); - *expected_rights.get_mut(&UREF_ADDRESS).unwrap() = AccessRights::READ_ADD; - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a READ: should have no observable effect. - context_rights.extend(&[UREF_READ]); - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a WRITE: should merge to single READ_ADD_WRITE. - context_rights.extend(&[UREF_WRITE]); - *expected_rights.get_mut(&UREF_ADDRESS).unwrap() = AccessRights::READ_ADD_WRITE; - assert_eq!(context_rights.access_rights, expected_rights); - } - - #[test] - fn should_perform_union_of_access_rights_in_new() { - let context_rights = - ContextAccessRights::new(KEY, vec![UREF_NO_PERMISSIONS, UREF_READ, UREF_ADD]); - - // Expect the three discrete URefs' rights to be unioned into READ_ADD. - let mut expected_rights = BTreeMap::new(); - expected_rights.insert(UREF_ADDRESS, AccessRights::READ_ADD); - assert_eq!(context_rights.access_rights, expected_rights); - } - - #[test] - fn should_grant_access_rights() { - let mut context_rights = ContextAccessRights::new(KEY, vec![UREF_READ_ADD]); - let granted_access = context_rights.grant_access(UREF_READ); - assert_eq!(granted_access, GrantedAccess::PreExisting); - let granted_access = context_rights.grant_access(UREF_READ_ADD_WRITE); - assert_eq!( - granted_access, - GrantedAccess::Granted { - uref_addr: UREF_ADDRESS, - newly_granted_access_rights: AccessRights::WRITE - } - ); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - let new_uref = URef::new([3; 32], AccessRights::all()); - let granted_access = context_rights.grant_access(new_uref); - assert_eq!( - granted_access, - GrantedAccess::Granted { - uref_addr: new_uref.addr(), - newly_granted_access_rights: AccessRights::all() - } - ); - assert!(context_rights.has_access_rights_to_uref(&new_uref)); - } - - #[test] - fn should_remove_access_rights() { - let mut context_rights = ContextAccessRights::new(KEY, vec![UREF_READ_ADD_WRITE]); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - - // Strip write access from the context rights. 
- context_rights.remove_access(UREF_ADDRESS, AccessRights::WRITE); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE), - "Write access should have been removed" - ); - - // Strip the access again to ensure that the bit is not flipped back. - context_rights.remove_access(UREF_ADDRESS, AccessRights::WRITE); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE), - "Write access should not have been granted back" - ); - assert!( - context_rights.has_access_rights_to_uref(&UREF_READ_ADD), - "Read and add access should be preserved." - ); - - // Strip both read and add access from the context rights. - context_rights.remove_access(UREF_ADDRESS, AccessRights::READ_ADD); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD), - "Read and add access should have been removed" - ); - assert!( - context_rights.has_access_rights_to_uref(&UREF_NO_PERMISSIONS), - "The access rights should be empty" - ); - } -} diff --git a/casper_types/src/account.rs b/casper_types/src/account.rs deleted file mode 100644 index f07892f0..00000000 --- a/casper_types/src/account.rs +++ /dev/null @@ -1,1013 +0,0 @@ -//! Contains types and constants associated with user accounts. - -mod account_hash; -pub mod action_thresholds; -mod action_type; -pub mod associated_keys; -mod error; -mod weight; - -use serde::Serialize; - -use alloc::{collections::BTreeSet, vec::Vec}; -use core::{ - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - iter, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; - -pub use self::{ - account_hash::{AccountHash, ACCOUNT_HASH_FORMATTED_STRING_PREFIX, ACCOUNT_HASH_LENGTH}, - action_thresholds::ActionThresholds, - action_type::ActionType, - associated_keys::AssociatedKeys, - error::{FromStrError, SetThresholdFailure, TryFromIntError, TryFromSliceForAccountHashError}, - weight::{Weight, WEIGHT_SERIALIZED_LENGTH}, -}; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - contracts::NamedKeys, - crypto, AccessRights, ContextAccessRights, Key, URef, BLAKE2B_DIGEST_LENGTH, -}; - -/// Represents an Account in the global state. -#[derive(PartialEq, Eq, Clone, Debug, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Account { - account_hash: AccountHash, - named_keys: NamedKeys, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, -} - -impl Account { - /// Creates a new account. - pub fn new( - account_hash: AccountHash, - named_keys: NamedKeys, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, - ) -> Self { - Account { - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - } - } - - /// An Account constructor with presets for associated_keys and action_thresholds. - /// - /// An account created with this method is valid and can be used as the target of a transaction. - /// It will be created with an [`AssociatedKeys`] with a [`Weight`] of 1, and a default - /// [`ActionThresholds`]. - pub fn create(account: AccountHash, named_keys: NamedKeys, main_purse: URef) -> Self { - let associated_keys = AssociatedKeys::new(account, Weight::new(1)); - - let action_thresholds: ActionThresholds = Default::default(); - Account::new( - account, - named_keys, - main_purse, - associated_keys, - action_thresholds, - ) - } - - /// Extracts the access rights from the named keys and main purse of the account. 
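`Account::create` above presets the account so it is immediately usable: the identity key is added with weight 1 and both action thresholds default to 1. A simplified sketch of that shape, with plain `u8` weights and illustrative field names:

```rust
use std::collections::BTreeMap;

/// Simplified stand-ins for the real types; weights and thresholds are plain u8s.
struct Account {
    associated_keys: BTreeMap<[u8; 32], u8>,
    deployment_threshold: u8,
    key_management_threshold: u8,
}

impl Account {
    /// Mirrors `Account::create`: the identity key gets weight 1 and both
    /// action thresholds default to 1, so the account can act on its own.
    fn create(account_hash: [u8; 32]) -> Self {
        let mut associated_keys = BTreeMap::new();
        associated_keys.insert(account_hash, 1);
        Account {
            associated_keys,
            deployment_threshold: 1,
            key_management_threshold: 1,
        }
    }
}

fn main() {
    let account = Account::create([7; 32]);
    assert_eq!(account.associated_keys.get(&[7; 32]), Some(&1));
    assert_eq!(account.deployment_threshold, 1);
    assert_eq!(account.key_management_threshold, 1);
}
```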
- pub fn extract_access_rights(&self) -> ContextAccessRights { - let urefs_iter = self - .named_keys - .values() - .filter_map(|key| key.as_uref().copied()) - .chain(iter::once(self.main_purse)); - ContextAccessRights::new(Key::from(self.account_hash), urefs_iter) - } - - /// Appends named keys to an account's named_keys field. - pub fn named_keys_append(&mut self, keys: &mut NamedKeys) { - self.named_keys.append(keys); - } - - /// Returns named keys. - pub fn named_keys(&self) -> &NamedKeys { - &self.named_keys - } - - /// Returns a mutable reference to named keys. - pub fn named_keys_mut(&mut self) -> &mut NamedKeys { - &mut self.named_keys - } - - /// Returns account hash. - pub fn account_hash(&self) -> AccountHash { - self.account_hash - } - - /// Returns main purse. - pub fn main_purse(&self) -> URef { - self.main_purse - } - - /// Returns an [`AccessRights::ADD`]-only version of the main purse's [`URef`]. - pub fn main_purse_add_only(&self) -> URef { - URef::new(self.main_purse.addr(), AccessRights::ADD) - } - - /// Returns associated keys. - pub fn associated_keys(&self) -> &AssociatedKeys { - &self.associated_keys - } - - /// Returns action thresholds. - pub fn action_thresholds(&self) -> &ActionThresholds { - &self.action_thresholds - } - - /// Adds an associated key to an account. - pub fn add_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), AddKeyFailure> { - self.associated_keys.add_key(account_hash, weight) - } - - /// Checks if removing given key would properly satisfy thresholds. - fn can_remove_key(&self, account_hash: AccountHash) -> bool { - let total_weight_without = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Returns true if the total weight calculated without given public key would be greater or - // equal to all of the thresholds. - total_weight_without >= *self.action_thresholds().deployment() - && total_weight_without >= *self.action_thresholds().key_management() - } - - /// Checks if adding a weight to a sum of all weights excluding the given key would make the - /// resulting value to fall below any of the thresholds on account. - fn can_update_key(&self, account_hash: AccountHash, weight: Weight) -> bool { - // Calculates total weight of all keys excluding the given key - let total_weight = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Safely calculate new weight by adding the updated weight - let new_weight = total_weight.value().saturating_add(weight.value()); - - // Returns true if the new weight would be greater or equal to all of - // the thresholds. - new_weight >= self.action_thresholds().deployment().value() - && new_weight >= self.action_thresholds().key_management().value() - } - - /// Removes an associated key from an account. - /// - /// Verifies that removing the key will not cause the remaining weight to fall below any action - /// thresholds. - pub fn remove_associated_key( - &mut self, - account_hash: AccountHash, - ) -> Result<(), RemoveKeyFailure> { - if self.associated_keys.contains_key(&account_hash) { - // Check if removing this weight would fall below thresholds - if !self.can_remove_key(account_hash) { - return Err(RemoveKeyFailure::ThresholdViolation); - } - } - self.associated_keys.remove_key(&account_hash) - } - - /// Updates an associated key. - /// - /// Returns an error if the update would result in a violation of the key management thresholds. 
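The removal check above recomputes the total key weight without the candidate key and requires that it still meet both action thresholds. A standalone sketch of the same rule:

```rust
use std::collections::BTreeMap;

/// Sum of all key weights excluding `key`, saturating at u8::MAX as in the original.
fn total_weight_excluding(keys: &BTreeMap<[u8; 32], u8>, key: &[u8; 32]) -> u8 {
    keys.iter()
        .filter(|&(k, _)| k != key)
        .fold(0u8, |acc, (_, w)| acc.saturating_add(*w))
}

/// A key may be removed only if the remaining weight still meets both thresholds.
fn can_remove_key(
    keys: &BTreeMap<[u8; 32], u8>,
    key: &[u8; 32],
    deployment: u8,
    key_management: u8,
) -> bool {
    let remaining = total_weight_excluding(keys, key);
    remaining >= deployment && remaining >= key_management
}

fn main() {
    let keys = BTreeMap::from([([1; 32], 1), ([2; 32], 2), ([3; 32], 3)]);
    assert!(can_remove_key(&keys, &[1; 32], 3, 5)); // 2 + 3 = 5 still suffices
    assert!(!can_remove_key(&keys, &[3; 32], 3, 5)); // 1 + 2 = 3 < 5
}
```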
- pub fn update_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), UpdateKeyFailure> { - if let Some(current_weight) = self.associated_keys.get(&account_hash) { - if weight < *current_weight { - // New weight is smaller than current weight - if !self.can_update_key(account_hash, weight) { - return Err(UpdateKeyFailure::ThresholdViolation); - } - } - } - self.associated_keys.update_key(account_hash, weight) - } - - /// Sets a new action threshold for a given action type for the account. - /// - /// Returns an error if the new action threshold weight is greater than the total weight of the - /// account's associated keys. - pub fn set_action_threshold( - &mut self, - action_type: ActionType, - weight: Weight, - ) -> Result<(), SetThresholdFailure> { - // Verify if new threshold weight exceeds total weight of all associated - // keys. - self.can_set_threshold(weight)?; - // Set new weight for given action - self.action_thresholds.set_threshold(action_type, weight) - } - - /// Verifies if user can set action threshold. - pub fn can_set_threshold(&self, new_threshold: Weight) -> Result<(), SetThresholdFailure> { - let total_weight = self.associated_keys.total_keys_weight(); - if new_threshold > total_weight { - return Err(SetThresholdFailure::InsufficientTotalWeight); - } - Ok(()) - } - - /// Sets a new action threshold for a given action type for the account without checking against - /// the total weight of the associated keys. - /// - /// This should only be called when authorized by an administrator account. - /// - /// Returns an error if setting the action would cause the `ActionType::Deployment` threshold to - /// be greater than any of the other action types. - pub fn set_action_threshold_unchecked( - &mut self, - action_type: ActionType, - threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - self.action_thresholds.set_threshold(action_type, threshold) - } - - /// Checks whether all authorization keys are associated with this account. - pub fn can_authorize(&self, authorization_keys: &BTreeSet) -> bool { - !authorization_keys.is_empty() - && authorization_keys - .iter() - .all(|e| self.associated_keys.contains_key(e)) - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to deploy threshold. - pub fn can_deploy_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().deployment() - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to key management threshold. 
- pub fn can_manage_keys_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().key_management() - } -} - -impl ToBytes for Account { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.account_hash().write_bytes(&mut result)?; - self.named_keys().write_bytes(&mut result)?; - self.main_purse.write_bytes(&mut result)?; - self.associated_keys().write_bytes(&mut result)?; - self.action_thresholds().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.account_hash.serialized_length() - + self.named_keys.serialized_length() - + self.main_purse.serialized_length() - + self.associated_keys.serialized_length() - + self.action_thresholds.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.account_hash().write_bytes(writer)?; - self.named_keys().write_bytes(writer)?; - self.main_purse().write_bytes(writer)?; - self.associated_keys().write_bytes(writer)?; - self.action_thresholds().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Account { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (account_hash, rem) = AccountHash::from_bytes(bytes)?; - let (named_keys, rem) = NamedKeys::from_bytes(rem)?; - let (main_purse, rem) = URef::from_bytes(rem)?; - let (associated_keys, rem) = AssociatedKeys::from_bytes(rem)?; - let (action_thresholds, rem) = ActionThresholds::from_bytes(rem)?; - Ok(( - Account { - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - }, - rem, - )) - } -} - -#[doc(hidden)] -#[deprecated( - since = "1.4.4", - note = "function moved to casper_types::crypto::blake2b" -)] -pub fn blake2b>(data: T) -> [u8; BLAKE2B_DIGEST_LENGTH] { - crypto::blake2b(data) -} - -/// Errors that can occur while adding a new [`AccountHash`] to an account's associated keys map. -#[derive(PartialEq, Eq, Debug, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum AddKeyFailure { - /// There are already maximum [`AccountHash`]s associated with the given account. - MaxKeysLimit = 1, - /// The given [`AccountHash`] is already associated with the given account. - DuplicateKey = 2, - /// Caller doesn't have sufficient permissions to associate a new [`AccountHash`] with the - /// given account. - PermissionDenied = 3, -} - -impl Display for AddKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - AddKeyFailure::MaxKeysLimit => formatter.write_str( - "Unable to add new associated key because maximum amount of keys is reached", - ), - AddKeyFailure::DuplicateKey => formatter - .write_str("Unable to add new associated key because given key already exists"), - AddKeyFailure::PermissionDenied => formatter - .write_str("Unable to add new associated key due to insufficient permissions"), - } - } -} - -// This conversion is not intended to be used by third party crates. 
-#[doc(hidden)] -impl TryFrom for AddKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == AddKeyFailure::MaxKeysLimit as i32 => Ok(AddKeyFailure::MaxKeysLimit), - d if d == AddKeyFailure::DuplicateKey as i32 => Ok(AddKeyFailure::DuplicateKey), - d if d == AddKeyFailure::PermissionDenied as i32 => Ok(AddKeyFailure::PermissionDenied), - _ => Err(TryFromIntError(())), - } - } -} - -/// Errors that can occur while removing a [`AccountHash`] from an account's associated keys map. -#[derive(Debug, Eq, PartialEq, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum RemoveKeyFailure { - /// The given [`AccountHash`] is not associated with the given account. - MissingKey = 1, - /// Caller doesn't have sufficient permissions to remove an associated [`AccountHash`] from the - /// given account. - PermissionDenied = 2, - /// Removing the given associated [`AccountHash`] would cause the total weight of all remaining - /// `AccountHash`s to fall below one of the action thresholds for the given account. - ThresholdViolation = 3, -} - -impl Display for RemoveKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - RemoveKeyFailure::MissingKey => { - formatter.write_str("Unable to remove a key that does not exist") - } - RemoveKeyFailure::PermissionDenied => formatter - .write_str("Unable to remove associated key due to insufficient permissions"), - RemoveKeyFailure::ThresholdViolation => formatter.write_str( - "Unable to remove a key which would violate action threshold constraints", - ), - } - } -} - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for RemoveKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == RemoveKeyFailure::MissingKey as i32 => Ok(RemoveKeyFailure::MissingKey), - d if d == RemoveKeyFailure::PermissionDenied as i32 => { - Ok(RemoveKeyFailure::PermissionDenied) - } - d if d == RemoveKeyFailure::ThresholdViolation as i32 => { - Ok(RemoveKeyFailure::ThresholdViolation) - } - _ => Err(TryFromIntError(())), - } - } -} - -/// Errors that can occur while updating the [`Weight`] of a [`AccountHash`] in an account's -/// associated keys map. -#[derive(PartialEq, Eq, Debug, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum UpdateKeyFailure { - /// The given [`AccountHash`] is not associated with the given account. - MissingKey = 1, - /// Caller doesn't have sufficient permissions to update an associated [`AccountHash`] from the - /// given account. - PermissionDenied = 2, - /// Updating the [`Weight`] of the given associated [`AccountHash`] would cause the total - /// weight of all `AccountHash`s to fall below one of the action thresholds for the given - /// account. - ThresholdViolation = 3, -} - -impl Display for UpdateKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - UpdateKeyFailure::MissingKey => formatter.write_str( - "Unable to update the value under an associated key that does not exist", - ), - UpdateKeyFailure::PermissionDenied => formatter - .write_str("Unable to update associated key due to insufficient permissions"), - UpdateKeyFailure::ThresholdViolation => formatter.write_str( - "Unable to update weight that would fall below any of action thresholds", - ), - } - } -} - -// This conversion is not intended to be used by third party crates. 
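The `TryFrom<i32>` impls above all use the same `d if d == Variant as i32` guard pattern to map raw integers back onto enum variants without a num_derive dependency. A self-contained example of the pattern with a hypothetical enum:

```rust
use std::convert::TryFrom;

#[derive(Debug, PartialEq, Clone, Copy)]
#[repr(i32)]
enum KeyFailure {
    MissingKey = 1,
    PermissionDenied = 2,
    ThresholdViolation = 3,
}

impl TryFrom<i32> for KeyFailure {
    type Error = ();

    // Match guards against `Variant as i32` keep the mapping explicit;
    // unknown values are rejected instead of silently mapped.
    fn try_from(value: i32) -> Result<Self, Self::Error> {
        match value {
            v if v == KeyFailure::MissingKey as i32 => Ok(KeyFailure::MissingKey),
            v if v == KeyFailure::PermissionDenied as i32 => Ok(KeyFailure::PermissionDenied),
            v if v == KeyFailure::ThresholdViolation as i32 => Ok(KeyFailure::ThresholdViolation),
            _ => Err(()),
        }
    }
}

fn main() {
    assert_eq!(KeyFailure::try_from(2), Ok(KeyFailure::PermissionDenied));
    assert!(KeyFailure::try_from(4).is_err());
}
```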
-#[doc(hidden)] -impl TryFrom for UpdateKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == UpdateKeyFailure::MissingKey as i32 => Ok(UpdateKeyFailure::MissingKey), - d if d == UpdateKeyFailure::PermissionDenied as i32 => { - Ok(UpdateKeyFailure::PermissionDenied) - } - d if d == UpdateKeyFailure::ThresholdViolation as i32 => { - Ok(UpdateKeyFailure::ThresholdViolation) - } - _ => Err(TryFromIntError(())), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use crate::{ - account::{ - action_thresholds::gens::action_thresholds_arb, - associated_keys::gens::associated_keys_arb, Account, Weight, - }, - gens::{account_hash_arb, named_keys_arb, uref_arb}, - }; - - prop_compose! { - pub fn account_arb()( - account_hash in account_hash_arb(), - urefs in named_keys_arb(3), - purse in uref_arb(), - thresholds in action_thresholds_arb(), - mut associated_keys in associated_keys_arb(), - ) -> Account { - associated_keys.add_key(account_hash, Weight::new(1)).unwrap(); - Account::new( - account_hash, - urefs, - purse, - associated_keys, - thresholds, - ) - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ - account::{ - Account, AccountHash, ActionThresholds, ActionType, AssociatedKeys, RemoveKeyFailure, - SetThresholdFailure, UpdateKeyFailure, Weight, - }, - contracts::NamedKeys, - AccessRights, URef, - }; - use std::{collections::BTreeSet, convert::TryFrom, iter::FromIterator, vec::Vec}; - - use super::*; - - #[test] - fn account_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let account_hash = AccountHash::try_from(&bytes[..]).expect("should create account hash"); - assert_eq!(&bytes, &account_hash.as_bytes()); - } - - #[test] - fn account_hash_from_slice_too_small() { - let _account_hash = - AccountHash::try_from(&[0u8; 31][..]).expect_err("should not create account hash"); - } - - #[test] - fn account_hash_from_slice_too_big() { - let _account_hash = - AccountHash::try_from(&[0u8; 33][..]).expect_err("should not create account hash"); - } - - #[test] - fn try_from_i32_for_set_threshold_failure() { - let max_valid_value_for_variant = SetThresholdFailure::InsufficientTotalWeight as i32; - assert_eq!( - Err(TryFromIntError(())), - SetThresholdFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `SetThresholdFailure::try_from` for a new variant of \ - `SetThresholdFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn try_from_i32_for_add_key_failure() { - let max_valid_value_for_variant = AddKeyFailure::PermissionDenied as i32; - assert_eq!( - Err(TryFromIntError(())), - AddKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `AddKeyFailure::try_from` for a new variant of \ - `AddKeyFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn try_from_i32_for_remove_key_failure() { - let max_valid_value_for_variant = RemoveKeyFailure::ThresholdViolation as i32; - assert_eq!( - Err(TryFromIntError(())), - RemoveKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `RemoveKeyFailure::try_from` for a new variant of \ - `RemoveKeyFailure`, or `max_valid_value_for_variant` in this test?" 
- ); - } - - #[test] - fn try_from_i32_for_update_key_failure() { - let max_valid_value_for_variant = UpdateKeyFailure::ThresholdViolation as i32; - assert_eq!( - Err(TryFromIntError(())), - UpdateKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `UpdateKeyFailure::try_from` for a new variant of \ - `UpdateKeyFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn account_hash_from_str() { - let account_hash = AccountHash([3; 32]); - let encoded = account_hash.to_formatted_string(); - let decoded = AccountHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(account_hash, decoded); - - let invalid_prefix = - "accounthash-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "account-hash0000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "account-hash-00000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "account-hash-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "account-hash-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(AccountHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn account_hash_serde_roundtrip() { - let account_hash = AccountHash([255; 32]); - let serialized = bincode::serialize(&account_hash).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(account_hash, decoded); - } - - #[test] - fn account_hash_json_roundtrip() { - let account_hash = AccountHash([255; 32]); - let json_string = serde_json::to_string_pretty(&account_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(account_hash, decoded); - } - - #[test] - fn associated_keys_can_authorize_keys() { - let key_1 = AccountHash::new([0; 32]); - let key_2 = AccountHash::new([1; 32]); - let key_3 = AccountHash::new([2; 32]); - let mut keys = AssociatedKeys::default(); - - keys.add_key(key_2, Weight::new(2)) - .expect("should add key_1"); - keys.add_key(key_1, Weight::new(1)) - .expect("should add key_1"); - keys.add_key(key_3, Weight::new(3)) - .expect("should add key_1"); - - let account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_3, key_2, key_1]))); - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1, key_3, key_2]))); - - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1, key_2]))); - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1]))); - - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - key_1, - key_2, - AccountHash::new([42; 32]) - ]))); - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - AccountHash::new([42; 32]), - key_1, - key_2 - ]))); - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - AccountHash::new([43; 32]), - AccountHash::new([44; 32]), - AccountHash::new([42; 32]) - ]))); - assert!(!account.can_authorize(&BTreeSet::new())); - } - - #[test] - fn account_can_deploy_with() { 
- let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(11)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(11)) - .expect("should add key 3"); - res - }; - let account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - // sum: 22, required 33 - can't deploy - assert!(!account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 33, required 33 - can deploy - assert!(account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 34, required 33 - can deploy - assert!(account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([2u8; 32]), - AccountHash::new([1u8; 32]), - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - ]))); - } - - #[test] - fn account_can_manage_keys_with() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(11)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(11)) - .expect("should add key 3"); - res - }; - let account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(11), Weight::new(33)) - .expect("should create thresholds"), - ); - - // sum: 22, required 33 - can't manage - assert!(!account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 33, required 33 - can manage - assert!(account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 34, required 33 - can manage - assert!(account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([2u8; 32]), - AccountHash::new([1u8; 32]), - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - ]))); - } - - #[test] - fn set_action_threshold_higher_than_total_weight() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - res.add_key(key_2, Weight::new(3)) - .expect("should add key 2"); - res.add_key(key_3, Weight::new(4)) - .expect("should add key 3"); - res - }; - let mut account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - assert_eq!( - account - .set_action_threshold(ActionType::Deployment, 
Weight::new(1 + 2 + 3 + 4 + 1)) - .unwrap_err(), - SetThresholdFailure::InsufficientTotalWeight, - ); - assert_eq!( - account - .set_action_threshold(ActionType::Deployment, Weight::new(1 + 2 + 3 + 4 + 245)) - .unwrap_err(), - SetThresholdFailure::InsufficientTotalWeight, - ) - } - - #[test] - fn remove_key_would_violate_action_thresholds() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - res.add_key(key_2, Weight::new(3)) - .expect("should add key 2"); - res.add_key(key_3, Weight::new(4)) - .expect("should add key 3"); - res - }; - let mut account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(1 + 2 + 3 + 4), Weight::new(1 + 2 + 3 + 4 + 5)) - .expect("should create thresholds"), - ); - - assert_eq!( - account.remove_associated_key(key_3).unwrap_err(), - RemoveKeyFailure::ThresholdViolation, - ) - } - - #[test] - fn updating_key_would_violate_action_thresholds() { - let identity_key = AccountHash::new([1u8; 32]); - let identity_key_weight = Weight::new(1); - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(2); - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(3); - let key_3 = AccountHash::new([4u8; 32]); - let key_3_weight = Weight::new(4); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - res.add_key(key_1, key_1_weight).expect("should add key 1"); - res.add_key(key_2, key_2_weight).expect("should add key 2"); - res.add_key(key_3, key_3_weight).expect("should add key 3"); - // 1 + 2 + 3 + 4 - res - }; - - let deployment_threshold = Weight::new( - identity_key_weight.value() - + key_1_weight.value() - + key_2_weight.value() - + key_3_weight.value(), - ); - let key_management_threshold = Weight::new(deployment_threshold.value() + 1); - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(deployment_threshold, key_management_threshold) - .expect("should create thresholds"), - ); - - // Decreases by 3 - assert_eq!( - account - .clone() - .update_associated_key(key_3, Weight::new(1)) - .unwrap_err(), - UpdateKeyFailure::ThresholdViolation, - ); - - // increase total weight (12) - account - .update_associated_key(identity_key, Weight::new(3)) - .unwrap(); - - // variant a) decrease total weight by 1 (total 11) - account - .clone() - .update_associated_key(key_3, Weight::new(3)) - .unwrap(); - // variant b) decrease total weight by 3 (total 9) - fail - assert_eq!( - account - .update_associated_key(key_3, Weight::new(1)) - .unwrap_err(), - UpdateKeyFailure::ThresholdViolation - ); - } - - #[test] - fn overflowing_should_allow_removal() { - let identity_key = AccountHash::new([42; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - - let associated_keys = { - // Identity - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - - // Spare key - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - // Big key - res.add_key(key_2, Weight::new(255)) - 
.expect("should add key 2"); - - res - }; - - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - ActionThresholds::new(Weight::new(1), Weight::new(254)) - .expect("should create thresholds"), - ); - - account.remove_associated_key(key_1).expect("should work") - } - - #[test] - fn overflowing_should_allow_updating() { - let identity_key = AccountHash::new([1; 32]); - let identity_key_weight = Weight::new(1); - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(3); - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(255); - let deployment_threshold = Weight::new(1); - let key_management_threshold = Weight::new(254); - - let associated_keys = { - // Identity - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - - // Spare key - res.add_key(key_1, key_1_weight).expect("should add key 1"); - // Big key - res.add_key(key_2, key_2_weight).expect("should add key 2"); - - res - }; - - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - ActionThresholds::new(deployment_threshold, key_management_threshold) - .expect("should create thresholds"), - ); - - // decrease so total weight would be changed from 1 + 3 + 255 to 1 + 1 + 255 - account - .update_associated_key(key_1, Weight::new(1)) - .expect("should work"); - } - - #[test] - fn should_extract_access_rights() { - const MAIN_PURSE: URef = URef::new([2; 32], AccessRights::READ_ADD_WRITE); - const OTHER_UREF: URef = URef::new([3; 32], AccessRights::READ); - - let account_hash = AccountHash::new([1u8; 32]); - let mut named_keys = NamedKeys::new(); - named_keys.insert("a".to_string(), Key::URef(OTHER_UREF)); - let associated_keys = AssociatedKeys::new(account_hash, Weight::new(1)); - let account = Account::new( - account_hash, - named_keys, - MAIN_PURSE, - associated_keys, - ActionThresholds::new(Weight::new(1), Weight::new(1)) - .expect("should create thresholds"), - ); - - let actual_access_rights = account.extract_access_rights(); - - let expected_access_rights = - ContextAccessRights::new(Key::from(account_hash), vec![MAIN_PURSE, OTHER_UREF]); - assert_eq!(actual_access_rights, expected_access_rights) - } -} - -#[cfg(test)] -mod proptests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::*; - - proptest! { - #[test] - fn test_value_account(acct in gens::account_arb()) { - bytesrepr::test_serialization_roundtrip(&acct); - } - } -} diff --git a/casper_types/src/account/account_hash.rs b/casper_types/src/account/account_hash.rs deleted file mode 100644 index 5c798be5..00000000 --- a/casper_types/src/account/account_hash.rs +++ /dev/null @@ -1,218 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::{ - convert::{From, TryFrom}, - fmt::{Debug, Display, Formatter}, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use super::FromStrError; -use crate::{ - bytesrepr::{Error, FromBytes, ToBytes}, - checksummed_hex, crypto, CLType, CLTyped, PublicKey, BLAKE2B_DIGEST_LENGTH, -}; - -/// The length in bytes of a [`AccountHash`]. 
-pub const ACCOUNT_HASH_LENGTH: usize = 32; -/// The prefix applied to the hex-encoded `AccountHash` to produce a formatted string -/// representation. -pub const ACCOUNT_HASH_FORMATTED_STRING_PREFIX: &str = "account-hash-"; - -/// A newtype wrapping an array which contains the raw bytes of -/// the AccountHash, a hash of Public Key and Algorithm -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct AccountHash(pub [u8; ACCOUNT_HASH_LENGTH]); - -impl AccountHash { - /// Constructs a new `AccountHash` instance from the raw bytes of an Public Key Account Hash. - pub const fn new(value: [u8; ACCOUNT_HASH_LENGTH]) -> AccountHash { - AccountHash(value) - } - - /// Returns the raw bytes of the account hash as an array. - pub fn value(&self) -> [u8; ACCOUNT_HASH_LENGTH] { - self.0 - } - - /// Returns the raw bytes of the account hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `AccountHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - ACCOUNT_HASH_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into an `AccountHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(ACCOUNT_HASH_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(AccountHash(bytes)) - } - - /// Parses a `PublicKey` and outputs the corresponding account hash. - pub fn from_public_key( - public_key: &PublicKey, - blake2b_hash_fn: impl Fn(Vec) -> [u8; BLAKE2B_DIGEST_LENGTH], - ) -> Self { - const SYSTEM_LOWERCASE: &str = "system"; - const ED25519_LOWERCASE: &str = "ed25519"; - const SECP256K1_LOWERCASE: &str = "secp256k1"; - - let algorithm_name = match public_key { - PublicKey::System => SYSTEM_LOWERCASE, - PublicKey::Ed25519(_) => ED25519_LOWERCASE, - PublicKey::Secp256k1(_) => SECP256K1_LOWERCASE, - }; - let public_key_bytes: Vec = public_key.into(); - - // Prepare preimage based on the public key parameters. - let preimage = { - let mut data = Vec::with_capacity(algorithm_name.len() + public_key_bytes.len() + 1); - data.extend(algorithm_name.as_bytes()); - data.push(0); - data.extend(public_key_bytes); - data - }; - // Hash the preimage data using blake2b256 and return it. 
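`to_formatted_string`/`from_formatted_str` above pair the fixed `account-hash-` prefix with lowercase hex, and parsing strips the prefix and checks the decoded length. A standalone sketch with a 4-byte value for brevity; the real hash is 32 bytes and the crate decodes checksummed hex rather than the plain decoder shown here:

```rust
const PREFIX: &str = "account-hash-";

fn to_formatted_string(bytes: &[u8; 4]) -> String {
    let hex: String = bytes.iter().map(|b| format!("{:02x}", b)).collect();
    format!("{PREFIX}{hex}")
}

fn from_formatted_str(input: &str) -> Option<[u8; 4]> {
    let hex = input.strip_prefix(PREFIX)?;
    if hex.len() != 8 {
        return None; // wrong length
    }
    let mut out = [0u8; 4];
    for (i, chunk) in hex.as_bytes().chunks(2).enumerate() {
        let digits = std::str::from_utf8(chunk).ok()?;
        out[i] = u8::from_str_radix(digits, 16).ok()?;
    }
    Some(out)
}

fn main() {
    let encoded = to_formatted_string(&[0xde, 0xad, 0xbe, 0xef]);
    assert_eq!(encoded, "account-hash-deadbeef");
    assert_eq!(from_formatted_str(&encoded), Some([0xde, 0xad, 0xbe, 0xef]));
    assert!(from_formatted_str("accounthash-deadbeef").is_none()); // bad prefix
}
```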
- let digest = blake2b_hash_fn(preimage); - Self::new(digest) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for AccountHash { - fn schema_name() -> String { - String::from("AccountHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Hex-encoded account hash.".to_string()); - schema_object.into() - } -} - -impl Serialize for AccountHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for AccountHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - AccountHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; ACCOUNT_HASH_LENGTH]>::deserialize(deserializer)?; - Ok(AccountHash(bytes)) - } - } -} - -impl TryFrom<&[u8]> for AccountHash { - type Error = TryFromSliceForAccountHashError; - - fn try_from(bytes: &[u8]) -> Result { - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(bytes) - .map(AccountHash::new) - .map_err(|_| TryFromSliceForAccountHashError(())) - } -} - -impl TryFrom<&alloc::vec::Vec> for AccountHash { - type Error = TryFromSliceForAccountHashError; - - fn try_from(bytes: &Vec) -> Result { - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(bytes as &[u8]) - .map(AccountHash::new) - .map_err(|_| TryFromSliceForAccountHashError(())) - } -} - -impl From<&PublicKey> for AccountHash { - fn from(public_key: &PublicKey) -> Self { - AccountHash::from_public_key(public_key, crypto::blake2b) - } -} - -impl Display for AccountHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for AccountHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "AccountHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for AccountHash { - fn cl_type() -> CLType { - CLType::ByteArray(ACCOUNT_HASH_LENGTH as u32) - } -} - -impl ToBytes for AccountHash { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for AccountHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((AccountHash::new(bytes), rem)) - } -} - -impl AsRef<[u8]> for AccountHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -/// Associated error type of `TryFrom<&[u8]>` for [`AccountHash`]. -#[derive(Debug)] -pub struct TryFromSliceForAccountHashError(()); - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> AccountHash { - AccountHash::new(rng.gen()) - } -} diff --git a/casper_types/src/account/action_thresholds.rs b/casper_types/src/account/action_thresholds.rs deleted file mode 100644 index 48eb21b3..00000000 --- a/casper_types/src/account/action_thresholds.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! This module contains types and functions for managing action thresholds. 
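The serde impls above branch on `is_human_readable()`, so JSON carries the formatted string while binary codecs such as bincode carry the raw bytes. A minimal sketch of the serialize side, assuming `serde` and `serde_json` as dependencies and a plain hex string in place of the prefixed form; the deserialize side mirrors the same branch:

```rust
use serde::{Serialize, Serializer};

struct Hash([u8; 4]);

impl Serialize for Hash {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        if serializer.is_human_readable() {
            // Human-readable formats (e.g. JSON) get a hex string.
            let hex: String = self.0.iter().map(|b| format!("{:02x}", b)).collect();
            hex.serialize(serializer)
        } else {
            // Binary formats (e.g. bincode) get the raw bytes.
            self.0.serialize(serializer)
        }
    }
}

fn main() {
    let json = serde_json::to_string(&Hash([1, 2, 3, 4])).unwrap();
    assert_eq!(json, "\"01020304\"");
}
```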
- -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::{ActionType, SetThresholdFailure, Weight, WEIGHT_SERIALIZED_LENGTH}, - bytesrepr::{self, Error, FromBytes, ToBytes}, -}; - -/// Thresholds that have to be met when executing an action of a certain type. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ActionThresholds { - /// Threshold for deploy execution. - pub deployment: Weight, - /// Threshold for managing action threshold. - pub key_management: Weight, -} - -impl ActionThresholds { - /// Creates new ActionThresholds object with provided weights - /// - /// Requires deployment threshold to be lower than or equal to - /// key management threshold. - pub fn new( - deployment: Weight, - key_management: Weight, - ) -> Result { - if deployment > key_management { - return Err(SetThresholdFailure::DeploymentThreshold); - } - Ok(ActionThresholds { - deployment, - key_management, - }) - } - /// Sets new threshold for [ActionType::Deployment]. - /// Should return an error if setting new threshold for `action_type` breaks - /// one of the invariants. Currently, invariant is that - /// `ActionType::Deployment` threshold shouldn't be higher than any - /// other, which should be checked both when increasing `Deployment` - /// threshold and decreasing the other. - pub fn set_deployment_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if new_threshold > self.key_management { - Err(SetThresholdFailure::DeploymentThreshold) - } else { - self.deployment = new_threshold; - Ok(()) - } - } - - /// Sets new threshold for [ActionType::KeyManagement]. - pub fn set_key_management_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if self.deployment > new_threshold { - Err(SetThresholdFailure::KeyManagementThreshold) - } else { - self.key_management = new_threshold; - Ok(()) - } - } - - /// Returns the deployment action threshold. - pub fn deployment(&self) -> &Weight { - &self.deployment - } - - /// Returns key management action threshold. - pub fn key_management(&self) -> &Weight { - &self.key_management - } - - /// Unified function that takes an action type, and changes appropriate - /// threshold defined by the [ActionType] variants. 
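`ActionThresholds` enforces one invariant in both the constructor and the setters: the deployment threshold may never exceed the key-management threshold. A standalone sketch of that invariant:

```rust
struct Thresholds {
    deployment: u8,
    key_management: u8,
}

impl Thresholds {
    /// Rejects a deployment threshold above the key-management threshold.
    fn new(deployment: u8, key_management: u8) -> Result<Self, &'static str> {
        if deployment > key_management {
            return Err("deployment threshold exceeds key management threshold");
        }
        Ok(Thresholds { deployment, key_management })
    }

    /// Raising deployment is checked against the current key-management threshold.
    fn set_deployment(&mut self, new: u8) -> Result<(), &'static str> {
        if new > self.key_management {
            return Err("deployment threshold exceeds key management threshold");
        }
        self.deployment = new;
        Ok(())
    }

    /// Lowering key management is checked against the current deployment threshold.
    fn set_key_management(&mut self, new: u8) -> Result<(), &'static str> {
        if self.deployment > new {
            return Err("key management threshold below deployment threshold");
        }
        self.key_management = new;
        Ok(())
    }
}

fn main() {
    assert!(Thresholds::new(5, 1).is_err());
    let mut thresholds = Thresholds::new(1, 42).unwrap();
    assert!(thresholds.set_deployment(43).is_err());
    assert!(thresholds.set_key_management(40).is_ok());
}
```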
- pub fn set_threshold( - &mut self, - action_type: ActionType, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - match action_type { - ActionType::Deployment => self.set_deployment_threshold(new_threshold), - ActionType::KeyManagement => self.set_key_management_threshold(new_threshold), - } - } -} - -impl Default for ActionThresholds { - fn default() -> Self { - ActionThresholds { - deployment: Weight::new(1), - key_management: Weight::new(1), - } - } -} - -impl ToBytes for ActionThresholds { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - result.append(&mut self.deployment.to_bytes()?); - result.append(&mut self.key_management.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - 2 * WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deployment().write_bytes(writer)?; - self.key_management().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ActionThresholds { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (deployment, rem) = Weight::from_bytes(bytes)?; - let (key_management, rem) = Weight::from_bytes(rem)?; - let ret = ActionThresholds { - deployment, - key_management, - }; - Ok((ret, rem)) - } -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use super::ActionThresholds; - - pub fn action_thresholds_arb() -> impl Strategy { - Just(Default::default()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn should_create_new_action_thresholds() { - let action_thresholds = ActionThresholds::new(Weight::new(1), Weight::new(42)).unwrap(); - assert_eq!(*action_thresholds.deployment(), Weight::new(1)); - assert_eq!(*action_thresholds.key_management(), Weight::new(42)); - } - - #[test] - fn should_not_create_action_thresholds_with_invalid_deployment_threshold() { - // deployment cant be greater than key management - assert!(ActionThresholds::new(Weight::new(5), Weight::new(1)).is_err()); - } - - #[test] - fn serialization_roundtrip() { - let action_thresholds = ActionThresholds::new(Weight::new(1), Weight::new(42)).unwrap(); - bytesrepr::test_serialization_roundtrip(&action_thresholds); - } -} diff --git a/casper_types/src/account/action_type.rs b/casper_types/src/account/action_type.rs deleted file mode 100644 index 2a4862a5..00000000 --- a/casper_types/src/account/action_type.rs +++ /dev/null @@ -1,32 +0,0 @@ -use core::convert::TryFrom; - -use super::TryFromIntError; - -/// The various types of action which can be performed in the context of a given account. -#[repr(u32)] -pub enum ActionType { - /// Represents performing a deploy. - Deployment = 0, - /// Represents changing the associated keys (i.e. map of [`AccountHash`](super::AccountHash)s - /// to [`Weight`](super::Weight)s) or action thresholds (i.e. the total - /// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to - /// perform various actions). - KeyManagement = 1, -} - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for ActionType { - type Error = TryFromIntError; - - fn try_from(value: u32) -> Result { - // This doesn't use `num_derive` traits such as FromPrimitive and ToPrimitive - // that helps to automatically create `from_u32` and `to_u32`. This approach - // gives better control over generated code. 
- match value { - d if d == ActionType::Deployment as u32 => Ok(ActionType::Deployment), - d if d == ActionType::KeyManagement as u32 => Ok(ActionType::KeyManagement), - _ => Err(TryFromIntError(())), - } - } -} diff --git a/casper_types/src/account/associated_keys.rs b/casper_types/src/account/associated_keys.rs deleted file mode 100644 index 698fa071..00000000 --- a/casper_types/src/account/associated_keys.rs +++ /dev/null @@ -1,360 +0,0 @@ -//! This module contains types and functions for working with keys associated with an account. - -use alloc::{ - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - vec::Vec, -}; - -use core::convert::TryInto; -#[cfg(feature = "datasize")] -use datasize::DataSize; - -use serde::{Deserialize, Serialize}; - -use crate::{ - account::{AccountHash, AddKeyFailure, RemoveKeyFailure, UpdateKeyFailure, Weight}, - bytesrepr::{self, Error, FromBytes, ToBytes}, -}; - -/// A mapping that represents the association of a [`Weight`] with an [`AccountHash`]. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct AssociatedKeys(BTreeMap); - -impl AssociatedKeys { - /// Constructs a new AssociatedKeys. - pub fn new(key: AccountHash, weight: Weight) -> AssociatedKeys { - let mut bt: BTreeMap = BTreeMap::new(); - bt.insert(key, weight); - AssociatedKeys(bt) - } - - /// Adds new AssociatedKey to the set. - /// Returns true if added successfully, false otherwise. - pub fn add_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), AddKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(entry) => { - entry.insert(weight); - } - Entry::Occupied(_) => return Err(AddKeyFailure::DuplicateKey), - } - Ok(()) - } - - /// Removes key from the associated keys set. - /// Returns true if value was found in the set prior to the removal, false - /// otherwise. - pub fn remove_key(&mut self, key: &AccountHash) -> Result<(), RemoveKeyFailure> { - self.0 - .remove(key) - .map(|_| ()) - .ok_or(RemoveKeyFailure::MissingKey) - } - - /// Adds new AssociatedKey to the set. - /// Returns true if added successfully, false otherwise. - pub fn update_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), UpdateKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(_) => { - return Err(UpdateKeyFailure::MissingKey); - } - Entry::Occupied(mut entry) => { - *entry.get_mut() = weight; - } - } - Ok(()) - } - - /// Returns the weight of an account hash. - pub fn get(&self, key: &AccountHash) -> Option<&Weight> { - self.0.get(key) - } - - /// Returns `true` if a given key exists. - pub fn contains_key(&self, key: &AccountHash) -> bool { - self.0.contains_key(key) - } - - /// Returns an iterator over the account hash and the weights. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns the count of the associated keys. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if the associated keys are empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Helper method that calculates weight for keys that comes from any - /// source. - /// - /// This method is not concerned about uniqueness of the passed iterable. - /// Uniqueness is determined based on the input collection properties, - /// which is either BTreeSet (in [`AssociatedKeys::calculate_keys_weight`]) - /// or BTreeMap (in [`AssociatedKeys::total_keys_weight`]). 
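`add_key` and `update_key` above are mirror images over the map's `Entry` API: adding fails on an occupied entry (duplicate key), updating fails on a vacant one (missing key), and each needs only a single lookup. A small self-contained version with string errors standing in for the failure enums:

```rust
use std::collections::{btree_map::Entry, BTreeMap};

fn add_key(keys: &mut BTreeMap<[u8; 32], u8>, key: [u8; 32], weight: u8) -> Result<(), &'static str> {
    match keys.entry(key) {
        Entry::Vacant(entry) => {
            entry.insert(weight);
            Ok(())
        }
        Entry::Occupied(_) => Err("duplicate key"),
    }
}

fn update_key(keys: &mut BTreeMap<[u8; 32], u8>, key: [u8; 32], weight: u8) -> Result<(), &'static str> {
    match keys.entry(key) {
        Entry::Occupied(mut entry) => {
            *entry.get_mut() = weight;
            Ok(())
        }
        Entry::Vacant(_) => Err("missing key"),
    }
}

fn main() {
    let mut keys = BTreeMap::new();
    assert!(add_key(&mut keys, [1; 32], 1).is_ok());
    assert_eq!(add_key(&mut keys, [1; 32], 2), Err("duplicate key"));
    assert_eq!(update_key(&mut keys, [2; 32], 3), Err("missing key"));
    assert!(update_key(&mut keys, [1; 32], 3).is_ok());
}
```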
- fn calculate_any_keys_weight<'a>(&self, keys: impl Iterator) -> Weight { - let total = keys - .filter_map(|key| self.0.get(key)) - .fold(0u8, |acc, w| acc.saturating_add(w.value())); - - Weight::new(total) - } - - /// Calculates total weight of authorization keys provided by an argument - pub fn calculate_keys_weight(&self, authorization_keys: &BTreeSet) -> Weight { - self.calculate_any_keys_weight(authorization_keys.iter()) - } - - /// Calculates total weight of all authorization keys - pub fn total_keys_weight(&self) -> Weight { - self.calculate_any_keys_weight(self.0.keys()) - } - - /// Calculates total weight of all authorization keys excluding a given key - pub fn total_keys_weight_excluding(&self, account_hash: AccountHash) -> Weight { - self.calculate_any_keys_weight(self.0.keys().filter(|&&element| element != account_hash)) - } -} - -impl From> for AssociatedKeys { - fn from(associated_keys: BTreeMap) -> Self { - Self(associated_keys) - } -} - -impl From for BTreeMap { - fn from(associated_keys: AssociatedKeys) -> Self { - associated_keys.0 - } -} - -impl ToBytes for AssociatedKeys { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let length_32: u32 = self - .0 - .len() - .try_into() - .map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for (key, weight) in self.0.iter() { - key.write_bytes(writer)?; - weight.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for AssociatedKeys { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (associated_keys, rem) = FromBytes::from_bytes(bytes)?; - Ok((AssociatedKeys(associated_keys), rem)) - } -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use crate::gens::{account_hash_arb, weight_arb}; - - use super::AssociatedKeys; - - pub fn associated_keys_arb() -> impl Strategy { - proptest::collection::btree_map(account_hash_arb(), weight_arb(), 10).prop_map(|keys| { - let mut associated_keys = AssociatedKeys::default(); - keys.into_iter().for_each(|(k, v)| { - associated_keys.add_key(k, v).unwrap(); - }); - associated_keys - }) - } -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeSet, iter::FromIterator}; - - use crate::{ - account::{AccountHash, AddKeyFailure, Weight, ACCOUNT_HASH_LENGTH}, - bytesrepr, - }; - - use super::*; - - #[test] - fn associated_keys_add() { - let mut keys = - AssociatedKeys::new(AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]), Weight::new(1)); - let new_pk = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let new_pk_weight = Weight::new(2); - assert!(keys.add_key(new_pk, new_pk_weight).is_ok()); - assert_eq!(keys.get(&new_pk), Some(&new_pk_weight)) - } - - #[test] - fn associated_keys_add_duplicate() { - let pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert_eq!( - keys.add_key(pk, Weight::new(10)), - Err(AddKeyFailure::DuplicateKey) - ); - assert_eq!(keys.get(&pk), Some(&weight)); - } - - #[test] - fn associated_keys_remove() { - let pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert!(keys.remove_key(&pk).is_ok()); - assert!(keys - .remove_key(&AccountHash::new([1u8; ACCOUNT_HASH_LENGTH])) - .is_err()); - } - - 
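`calculate_any_keys_weight` folds with `saturating_add`, so weight sums clamp at 255 instead of overflowing; the overflow tests later in this file rely on exactly that behaviour. A one-function sketch:

```rust
/// Weight sums clamp at u8::MAX instead of wrapping or panicking on overflow.
fn total_weight(weights: &[u8]) -> u8 {
    weights.iter().fold(0u8, |acc, w| acc.saturating_add(*w))
}

fn main() {
    assert_eq!(total_weight(&[1, 2, 3]), 6);
    assert_eq!(total_weight(&[250, 1, 2, 3]), 255); // 256 would overflow; clamps at 255
}
```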
#[test] - fn associated_keys_update() { - let pk1 = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let pk2 = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk1, weight); - assert!(matches!( - keys.update_key(pk2, Weight::new(2)) - .expect_err("should get error"), - UpdateKeyFailure::MissingKey - )); - keys.add_key(pk2, Weight::new(1)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(1))); - keys.update_key(pk2, Weight::new(2)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(2))); - } - - #[test] - fn associated_keys_calculate_keys_once() { - let key_1 = AccountHash::new([0; 32]); - let key_2 = AccountHash::new([1; 32]); - let key_3 = AccountHash::new([2; 32]); - let mut keys = AssociatedKeys::default(); - - keys.add_key(key_2, Weight::new(2)) - .expect("should add key_1"); - keys.add_key(key_1, Weight::new(1)) - .expect("should add key_1"); - keys.add_key(key_3, Weight::new(3)) - .expect("should add key_1"); - - assert_eq!( - keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - key_1, key_2, key_3, key_1, key_2, key_3, - ])), - Weight::new(1 + 2 + 3) - ); - } - - #[test] - fn associated_keys_total_weight() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(12)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(13)) - .expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight(), - Weight::new(1 + 11 + 12 + 13) - ); - } - - #[test] - fn associated_keys_total_weight_excluding() { - let identity_key = AccountHash::new([1u8; 32]); - let identity_key_weight = Weight::new(1); - - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(11); - - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(12); - - let key_3 = AccountHash::new([4u8; 32]); - let key_3_weight = Weight::new(13); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - res.add_key(key_1, key_1_weight).expect("should add key 1"); - res.add_key(key_2, key_2_weight).expect("should add key 2"); - res.add_key(key_3, key_3_weight).expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight_excluding(key_2), - Weight::new(identity_key_weight.value() + key_1_weight.value() + key_3_weight.value()) - ); - } - - #[test] - fn overflowing_keys_weight() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - - let identity_key_weight = Weight::new(250); - let weight_1 = Weight::new(1); - let weight_2 = Weight::new(2); - let weight_3 = Weight::new(3); - - let saturated_weight = Weight::new(u8::max_value()); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - - res.add_key(key_1, weight_1).expect("should add key 1"); - res.add_key(key_2, weight_2).expect("should add key 2"); - res.add_key(key_3, weight_3).expect("should add key 3"); - res - }; - - assert_eq!( - associated_keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - identity_key, // 250 - key_1, // 251 - key_2, // 253 - key_3, // 256 - error - ])), - saturated_weight, - ); - } - - #[test] - fn serialization_roundtrip() { - let mut keys = 
AssociatedKeys::default(); - keys.add_key(AccountHash::new([1; 32]), Weight::new(1)) - .unwrap(); - keys.add_key(AccountHash::new([2; 32]), Weight::new(2)) - .unwrap(); - keys.add_key(AccountHash::new([3; 32]), Weight::new(3)) - .unwrap(); - bytesrepr::test_serialization_roundtrip(&keys); - } -} diff --git a/casper_types/src/account/error.rs b/casper_types/src/account/error.rs deleted file mode 100644 index 36b9cb7f..00000000 --- a/casper_types/src/account/error.rs +++ /dev/null @@ -1,110 +0,0 @@ -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Display, Formatter}, -}; - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -#[derive(Debug, Eq, PartialEq)] -pub struct TryFromIntError(pub(super) ()); - -/// Error returned when decoding an `AccountHash` from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// The prefix is invalid. - InvalidPrefix, - /// The hash is not valid hex. - Hex(base16::DecodeError), - /// The hash is the wrong length. - Hash(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'account-hash-'"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Hash(error) => write!(f, "address portion is wrong length: {}", error), - } - } -} - -/// Errors that can occur while changing action thresholds (i.e. the total -/// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to perform -/// various actions) on an account. -#[repr(i32)] -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -#[non_exhaustive] -pub enum SetThresholdFailure { - /// Setting the key-management threshold to a value lower than the deployment threshold is - /// disallowed. - KeyManagementThreshold = 1, - /// Setting the deployment threshold to a value greater than any other threshold is disallowed. - DeploymentThreshold = 2, - /// Caller doesn't have sufficient permissions to set new thresholds. - PermissionDeniedError = 3, - /// Setting a threshold to a value greater than the total weight of associated keys is - /// disallowed. - InsufficientTotalWeight = 4, -} - -// This conversion is not intended to be used by third party crates. 
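The `serialization_roundtrip` test above exercises the byte layout produced by the removed `ToBytes for AssociatedKeys` impl: a `u32` little-endian entry count followed by each 32-byte account hash and its 1-byte weight. A hedged sketch of that layout with standalone types, not the crate's API:

```rust
// Illustrative encoder for the map layout: u32 LE count, then (hash, weight) pairs.
use std::collections::BTreeMap;

fn encode(keys: &BTreeMap<[u8; 32], u8>) -> Vec<u8> {
    let mut out = Vec::new();
    out.extend_from_slice(&(keys.len() as u32).to_le_bytes()); // length prefix
    for (hash, weight) in keys {
        out.extend_from_slice(hash); // fixed-width key bytes
        out.push(*weight);           // single-byte weight
    }
    out
}

fn main() {
    let mut keys = BTreeMap::new();
    keys.insert([1u8; 32], 1);
    keys.insert([2u8; 32], 2);
    let bytes = encode(&keys);
    assert_eq!(bytes.len(), 4 + 2 * (32 + 1));
    assert_eq!(&bytes[..4], &2u32.to_le_bytes()[..]);
}
```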
-#[doc(hidden)] -impl TryFrom for SetThresholdFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == SetThresholdFailure::KeyManagementThreshold as i32 => { - Ok(SetThresholdFailure::KeyManagementThreshold) - } - d if d == SetThresholdFailure::DeploymentThreshold as i32 => { - Ok(SetThresholdFailure::DeploymentThreshold) - } - d if d == SetThresholdFailure::PermissionDeniedError as i32 => { - Ok(SetThresholdFailure::PermissionDeniedError) - } - d if d == SetThresholdFailure::InsufficientTotalWeight as i32 => { - Ok(SetThresholdFailure::InsufficientTotalWeight) - } - _ => Err(TryFromIntError(())), - } - } -} - -impl Display for SetThresholdFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - SetThresholdFailure::KeyManagementThreshold => formatter - .write_str("New threshold should be greater than or equal to deployment threshold"), - SetThresholdFailure::DeploymentThreshold => formatter.write_str( - "New threshold should be lower than or equal to key management threshold", - ), - SetThresholdFailure::PermissionDeniedError => formatter - .write_str("Unable to set action threshold due to insufficient permissions"), - SetThresholdFailure::InsufficientTotalWeight => formatter.write_str( - "New threshold should be lower or equal than total weight of associated keys", - ), - } - } -} - -/// Associated error type of `TryFrom<&[u8]>` for [`AccountHash`](super::AccountHash). -#[derive(Debug)] -pub struct TryFromSliceForAccountHashError(()); diff --git a/casper_types/src/account/weight.rs b/casper_types/src/account/weight.rs deleted file mode 100644 index b27d7737..00000000 --- a/casper_types/src/account/weight.rs +++ /dev/null @@ -1,62 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// The number of bytes in a serialized [`Weight`]. -pub const WEIGHT_SERIALIZED_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// The weight attributed to a given [`AccountHash`](super::AccountHash) in an account's associated -/// keys. -#[derive(PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Weight(u8); - -impl Weight { - /// Maximum possible weight. - pub const MAX: Weight = Weight(u8::MAX); - - /// Constructs a new `Weight`. - pub const fn new(weight: u8) -> Weight { - Weight(weight) - } - - /// Returns the value of `self` as a `u8`. - pub fn value(self) -> u8 { - self.0 - } -} - -impl ToBytes for Weight { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.0); - Ok(()) - } -} - -impl FromBytes for Weight { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (byte, rem) = u8::from_bytes(bytes)?; - Ok((Weight::new(byte), rem)) - } -} - -impl CLTyped for Weight { - fn cl_type() -> CLType { - CLType::U8 - } -} diff --git a/casper_types/src/api_error.rs b/casper_types/src/api_error.rs deleted file mode 100644 index eb1da1a1..00000000 --- a/casper_types/src/api_error.rs +++ /dev/null @@ -1,874 +0,0 @@ -//! Contains [`ApiError`] and associated helper functions. 
- -use core::{ - convert::TryFrom, - fmt::{self, Debug, Formatter}, -}; - -use crate::{ - account::{ - AddKeyFailure, RemoveKeyFailure, SetThresholdFailure, TryFromIntError, - TryFromSliceForAccountHashError, UpdateKeyFailure, - }, - bytesrepr, contracts, - system::{auction, handle_payment, mint}, - CLValueError, -}; - -/// All `Error` variants defined in this library other than `Error::User` will convert to a `u32` -/// value less than or equal to `RESERVED_ERROR_MAX`. -const RESERVED_ERROR_MAX: u32 = u16::MAX as u32; // 0..=65535 - -/// Handle Payment errors will have this value added to them when being converted to a `u32`. -const POS_ERROR_OFFSET: u32 = RESERVED_ERROR_MAX - u8::MAX as u32; // 65280..=65535 - -/// Mint errors will have this value added to them when being converted to a `u32`. -const MINT_ERROR_OFFSET: u32 = (POS_ERROR_OFFSET - 1) - u8::MAX as u32; // 65024..=65279 - -/// Contract header errors will have this value added to them when being converted to a `u32`. -const HEADER_ERROR_OFFSET: u32 = (MINT_ERROR_OFFSET - 1) - u8::MAX as u32; // 64768..=65023 - -/// Contract header errors will have this value added to them when being converted to a `u32`. -const AUCTION_ERROR_OFFSET: u32 = (HEADER_ERROR_OFFSET - 1) - u8::MAX as u32; // 64512..=64767 - -/// Minimum value of user error's inclusive range. -const USER_ERROR_MIN: u32 = RESERVED_ERROR_MAX + 1; - -/// Maximum value of user error's inclusive range. -const USER_ERROR_MAX: u32 = 2 * RESERVED_ERROR_MAX + 1; - -/// Minimum value of Mint error's inclusive range. -const MINT_ERROR_MIN: u32 = MINT_ERROR_OFFSET; - -/// Maximum value of Mint error's inclusive range. -const MINT_ERROR_MAX: u32 = POS_ERROR_OFFSET - 1; - -/// Minimum value of Handle Payment error's inclusive range. -const HP_ERROR_MIN: u32 = POS_ERROR_OFFSET; - -/// Maximum value of Handle Payment error's inclusive range. -const HP_ERROR_MAX: u32 = RESERVED_ERROR_MAX; - -/// Minimum value of contract header error's inclusive range. -const HEADER_ERROR_MIN: u32 = HEADER_ERROR_OFFSET; - -/// Maximum value of contract header error's inclusive range. -const HEADER_ERROR_MAX: u32 = HEADER_ERROR_OFFSET + u8::MAX as u32; - -/// Minimum value of an auction contract error's inclusive range. -const AUCTION_ERROR_MIN: u32 = AUCTION_ERROR_OFFSET; - -/// Maximum value of an auction contract error's inclusive range. -const AUCTION_ERROR_MAX: u32 = AUCTION_ERROR_OFFSET + u8::MAX as u32; - -/// Errors which can be encountered while running a smart contract. -/// -/// An `ApiError` can be converted to a `u32` in order to be passed via the execution engine's -/// `ext_ffi::casper_revert()` function. This means the information each variant can convey is -/// limited. -/// -/// The variants are split into numeric ranges as follows: -/// -/// | Inclusive range | Variant(s) | -/// | ----------------| ----------------------------------------------------------------| -/// | [1, 64511] | all except reserved system contract error ranges defined below. | -/// | [64512, 64767] | `Auction` | -/// | [64768, 65023] | `ContractHeader` | -/// | [65024, 65279] | `Mint` | -/// | [65280, 65535] | `HandlePayment` | -/// | [65536, 131071] | `User` | -/// -/// Users can specify a C-style enum and implement `From` to ease usage of -/// `casper_contract::runtime::revert()`, e.g. 
-/// ``` -/// use casper_types::ApiError; -/// -/// #[repr(u16)] -/// enum FailureCode { -/// Zero = 0, // 65,536 as an ApiError::User -/// One, // 65,537 as an ApiError::User -/// Two // 65,538 as an ApiError::User -/// } -/// -/// impl From for ApiError { -/// fn from(code: FailureCode) -> Self { -/// ApiError::User(code as u16) -/// } -/// } -/// -/// assert_eq!(ApiError::User(1), FailureCode::One.into()); -/// assert_eq!(65_536, u32::from(ApiError::from(FailureCode::Zero))); -/// assert_eq!(65_538, u32::from(ApiError::from(FailureCode::Two))); -/// ``` -#[derive(Copy, Clone, PartialEq, Eq)] -#[non_exhaustive] -pub enum ApiError { - /// Optional data was unexpectedly `None`. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(1), ApiError::None); - /// ``` - None, - /// Specified argument not provided. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(2), ApiError::MissingArgument); - /// ``` - MissingArgument, - /// Argument not of correct type. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(3), ApiError::InvalidArgument); - /// ``` - InvalidArgument, - /// Failed to deserialize a value. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(4), ApiError::Deserialize); - /// ``` - Deserialize, - /// `casper_contract::storage::read()` returned an error. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(5), ApiError::Read); - /// ``` - Read, - /// The given key returned a `None` value. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(6), ApiError::ValueNotFound); - /// ``` - ValueNotFound, - /// Failed to find a specified contract. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(7), ApiError::ContractNotFound); - /// ``` - ContractNotFound, - /// A call to `casper_contract::runtime::get_key()` returned a failure. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(8), ApiError::GetKey); - /// ``` - GetKey, - /// The [`Key`](crate::Key) variant was not as expected. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(9), ApiError::UnexpectedKeyVariant); - /// ``` - UnexpectedKeyVariant, - /// Obsolete error variant (we no longer have ContractRef). - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(10), ApiError::UnexpectedContractRefVariant); - /// ``` - UnexpectedContractRefVariant, // TODO: this variant is not used any longer and can be removed - /// Invalid purse name given. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(11), ApiError::InvalidPurseName); - /// ``` - InvalidPurseName, - /// Invalid purse retrieved. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(12), ApiError::InvalidPurse); - /// ``` - InvalidPurse, - /// Failed to upgrade contract at [`URef`](crate::URef). - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(13), ApiError::UpgradeContractAtURef); - /// ``` - UpgradeContractAtURef, - /// Failed to transfer motes. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(14), ApiError::Transfer); - /// ``` - Transfer, - /// The given [`URef`](crate::URef) has no access rights. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(15), ApiError::NoAccessRights); - /// ``` - NoAccessRights, - /// A given type could not be constructed from a [`CLValue`](crate::CLValue). 
- /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(16), ApiError::CLTypeMismatch); - /// ``` - CLTypeMismatch, - /// Early end of stream while deserializing. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(17), ApiError::EarlyEndOfStream); - /// ``` - EarlyEndOfStream, - /// Formatting error while deserializing. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(18), ApiError::Formatting); - /// ``` - Formatting, - /// Not all input bytes were consumed in [`deserialize`](crate::bytesrepr::deserialize). - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(19), ApiError::LeftOverBytes); - /// ``` - LeftOverBytes, - /// Out of memory error. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(20), ApiError::OutOfMemory); - /// ``` - OutOfMemory, - /// There are already maximum [`AccountHash`](crate::account::AccountHash)s associated with the - /// given account. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(21), ApiError::MaxKeysLimit); - /// ``` - MaxKeysLimit, - /// The given [`AccountHash`](crate::account::AccountHash) is already associated with the given - /// account. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(22), ApiError::DuplicateKey); - /// ``` - DuplicateKey, - /// Caller doesn't have sufficient permissions to perform the given action. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(23), ApiError::PermissionDenied); - /// ``` - PermissionDenied, - /// The given [`AccountHash`](crate::account::AccountHash) is not associated with the given - /// account. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(24), ApiError::MissingKey); - /// ``` - MissingKey, - /// Removing/updating the given associated [`AccountHash`](crate::account::AccountHash) would - /// cause the total [`Weight`](crate::account::Weight) of all remaining `AccountHash`s to - /// fall below one of the action thresholds for the given account. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(25), ApiError::ThresholdViolation); - /// ``` - ThresholdViolation, - /// Setting the key-management threshold to a value lower than the deployment threshold is - /// disallowed. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(26), ApiError::KeyManagementThreshold); - /// ``` - KeyManagementThreshold, - /// Setting the deployment threshold to a value greater than any other threshold is disallowed. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(27), ApiError::DeploymentThreshold); - /// ``` - DeploymentThreshold, - /// Setting a threshold to a value greater than the total weight of associated keys is - /// disallowed. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(28), ApiError::InsufficientTotalWeight); - /// ``` - InsufficientTotalWeight, - /// The given `u32` doesn't map to a [`SystemContractType`](crate::system::SystemContractType). - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(29), ApiError::InvalidSystemContract); - /// ``` - InvalidSystemContract, - /// Failed to create a new purse. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(30), ApiError::PurseNotCreated); - /// ``` - PurseNotCreated, - /// An unhandled value, likely representing a bug in the code. 
- /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(31), ApiError::Unhandled); - /// ``` - Unhandled, - /// The provided buffer is too small to complete an operation. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(32), ApiError::BufferTooSmall); - /// ``` - BufferTooSmall, - /// No data available in the host buffer. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(33), ApiError::HostBufferEmpty); - /// ``` - HostBufferEmpty, - /// The host buffer has been set to a value and should be consumed first by a read operation. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(34), ApiError::HostBufferFull); - /// ``` - HostBufferFull, - /// Could not lay out an array in memory - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(35), ApiError::AllocLayout); - /// ``` - AllocLayout, - /// The `dictionary_item_key` length exceeds the maximum length. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(36), ApiError::DictionaryItemKeyExceedsLength); - /// ``` - DictionaryItemKeyExceedsLength, - /// The `dictionary_item_key` is invalid. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(37), ApiError::InvalidDictionaryItemKey); - /// ``` - InvalidDictionaryItemKey, - /// Unable to retrieve the requested system contract hash. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(38), ApiError::MissingSystemContractHash); - /// ``` - MissingSystemContractHash, - /// Exceeded a recursion depth limit. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(39), ApiError::ExceededRecursionDepth); - /// ``` - ExceededRecursionDepth, - /// Attempt to serialize a value that does not have a serialized representation. - /// ``` - /// # use casper_types::ApiError; - /// assert_eq!(ApiError::from(40), ApiError::NonRepresentableSerialization); - /// ``` - NonRepresentableSerialization, - /// Error specific to Auction contract. See - /// [casper_types::system::auction::Error](crate::system::auction::Error). - /// ``` - /// # use casper_types::ApiError; - /// for code in 64512..=64767 { - /// assert!(matches!(ApiError::from(code), ApiError::AuctionError(_auction_error))); - /// } - /// ``` - AuctionError(u8), - /// Contract header errors. See [casper_types::contracts::Error](crate::contracts::Error). - /// - /// ``` - /// # use casper_types::ApiError; - /// for code in 64768..=65023 { - /// assert!(matches!(ApiError::from(code), ApiError::ContractHeader(_contract_header_error))); - /// } - /// ``` - ContractHeader(u8), - /// Error specific to Mint contract. See - /// [casper_types::system::mint::Error](crate::system::mint::Error). - /// ``` - /// # use casper_types::ApiError; - /// for code in 65024..=65279 { - /// assert!(matches!(ApiError::from(code), ApiError::Mint(_mint_error))); - /// } - /// ``` - Mint(u8), - /// Error specific to Handle Payment contract. See - /// [casper_types::system::handle_payment](crate::system::handle_payment::Error). - /// ``` - /// # use casper_types::ApiError; - /// for code in 65280..=65535 { - /// assert!(matches!(ApiError::from(code), ApiError::HandlePayment(_handle_payment_error))); - /// } - /// ``` - HandlePayment(u8), - /// User-specified error code. The internal `u16` value is added to `u16::MAX as u32 + 1` when - /// an `Error::User` is converted to a `u32`. 
- /// ``` - /// # use casper_types::ApiError; - /// for code in 65536..131071 { - /// assert!(matches!(ApiError::from(code), ApiError::User(_))); - /// } - /// ``` - User(u16), -} - -impl From for ApiError { - fn from(error: bytesrepr::Error) -> Self { - match error { - bytesrepr::Error::EarlyEndOfStream => ApiError::EarlyEndOfStream, - bytesrepr::Error::Formatting => ApiError::Formatting, - bytesrepr::Error::LeftOverBytes => ApiError::LeftOverBytes, - bytesrepr::Error::OutOfMemory => ApiError::OutOfMemory, - bytesrepr::Error::NotRepresentable => ApiError::NonRepresentableSerialization, - bytesrepr::Error::ExceededRecursionDepth => ApiError::ExceededRecursionDepth, - } - } -} - -impl From for ApiError { - fn from(error: AddKeyFailure) -> Self { - match error { - AddKeyFailure::MaxKeysLimit => ApiError::MaxKeysLimit, - AddKeyFailure::DuplicateKey => ApiError::DuplicateKey, - AddKeyFailure::PermissionDenied => ApiError::PermissionDenied, - } - } -} - -impl From for ApiError { - fn from(error: UpdateKeyFailure) -> Self { - match error { - UpdateKeyFailure::MissingKey => ApiError::MissingKey, - UpdateKeyFailure::PermissionDenied => ApiError::PermissionDenied, - UpdateKeyFailure::ThresholdViolation => ApiError::ThresholdViolation, - } - } -} - -impl From for ApiError { - fn from(error: RemoveKeyFailure) -> Self { - match error { - RemoveKeyFailure::MissingKey => ApiError::MissingKey, - RemoveKeyFailure::PermissionDenied => ApiError::PermissionDenied, - RemoveKeyFailure::ThresholdViolation => ApiError::ThresholdViolation, - } - } -} - -impl From for ApiError { - fn from(error: SetThresholdFailure) -> Self { - match error { - SetThresholdFailure::KeyManagementThreshold => ApiError::KeyManagementThreshold, - SetThresholdFailure::DeploymentThreshold => ApiError::DeploymentThreshold, - SetThresholdFailure::PermissionDeniedError => ApiError::PermissionDenied, - SetThresholdFailure::InsufficientTotalWeight => ApiError::InsufficientTotalWeight, - } - } -} - -impl From for ApiError { - fn from(error: CLValueError) -> Self { - match error { - CLValueError::Serialization(bytesrepr_error) => bytesrepr_error.into(), - CLValueError::Type(_) => ApiError::CLTypeMismatch, - } - } -} - -impl From for ApiError { - fn from(error: contracts::Error) -> Self { - ApiError::ContractHeader(error as u8) - } -} - -impl From for ApiError { - fn from(error: auction::Error) -> Self { - ApiError::AuctionError(error as u8) - } -} - -// This conversion is not intended to be used by third party crates. 
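The numeric bands documented for `ApiError` (auction, contract header, mint, handle payment, user) are all derived from `u16::MAX` by repeatedly carving off 256-value ranges. A small sanity check that re-derives those constants from the same formulas used in the removed source; the local names here are illustrative, not the crate's constants:

```rust
// Re-derive the documented ApiError code ranges and check the boundary values.
fn main() {
    let reserved_error_max: u32 = u16::MAX as u32;                      // 65_535
    let pos_error_offset = reserved_error_max - u8::MAX as u32;         // 65_280
    let mint_error_offset = (pos_error_offset - 1) - u8::MAX as u32;    // 65_024
    let header_error_offset = (mint_error_offset - 1) - u8::MAX as u32; // 64_768
    let auction_error_offset = (header_error_offset - 1) - u8::MAX as u32; // 64_512
    let user_error_min = reserved_error_max + 1;                        // 65_536

    assert_eq!(pos_error_offset, 65_280);
    assert_eq!(mint_error_offset, 65_024);
    assert_eq!(header_error_offset, 64_768);
    assert_eq!(auction_error_offset, 64_512);
    assert_eq!(user_error_min, 65_536);
    // The largest user error code lands at the top of the documented range.
    assert_eq!(user_error_min + u16::MAX as u32, 131_071);
}
```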
-#[doc(hidden)] -impl From for ApiError { - fn from(_error: TryFromIntError) -> Self { - ApiError::Unhandled - } -} - -impl From for ApiError { - fn from(_error: TryFromSliceForAccountHashError) -> Self { - ApiError::Deserialize - } -} - -impl From for ApiError { - fn from(error: mint::Error) -> Self { - ApiError::Mint(error as u8) - } -} - -impl From for ApiError { - fn from(error: handle_payment::Error) -> Self { - ApiError::HandlePayment(error as u8) - } -} - -impl From for u32 { - fn from(error: ApiError) -> Self { - match error { - ApiError::None => 1, - ApiError::MissingArgument => 2, - ApiError::InvalidArgument => 3, - ApiError::Deserialize => 4, - ApiError::Read => 5, - ApiError::ValueNotFound => 6, - ApiError::ContractNotFound => 7, - ApiError::GetKey => 8, - ApiError::UnexpectedKeyVariant => 9, - ApiError::UnexpectedContractRefVariant => 10, - ApiError::InvalidPurseName => 11, - ApiError::InvalidPurse => 12, - ApiError::UpgradeContractAtURef => 13, - ApiError::Transfer => 14, - ApiError::NoAccessRights => 15, - ApiError::CLTypeMismatch => 16, - ApiError::EarlyEndOfStream => 17, - ApiError::Formatting => 18, - ApiError::LeftOverBytes => 19, - ApiError::OutOfMemory => 20, - ApiError::MaxKeysLimit => 21, - ApiError::DuplicateKey => 22, - ApiError::PermissionDenied => 23, - ApiError::MissingKey => 24, - ApiError::ThresholdViolation => 25, - ApiError::KeyManagementThreshold => 26, - ApiError::DeploymentThreshold => 27, - ApiError::InsufficientTotalWeight => 28, - ApiError::InvalidSystemContract => 29, - ApiError::PurseNotCreated => 30, - ApiError::Unhandled => 31, - ApiError::BufferTooSmall => 32, - ApiError::HostBufferEmpty => 33, - ApiError::HostBufferFull => 34, - ApiError::AllocLayout => 35, - ApiError::DictionaryItemKeyExceedsLength => 36, - ApiError::InvalidDictionaryItemKey => 37, - ApiError::MissingSystemContractHash => 38, - ApiError::ExceededRecursionDepth => 39, - ApiError::NonRepresentableSerialization => 40, - ApiError::AuctionError(value) => AUCTION_ERROR_OFFSET + u32::from(value), - ApiError::ContractHeader(value) => HEADER_ERROR_OFFSET + u32::from(value), - ApiError::Mint(value) => MINT_ERROR_OFFSET + u32::from(value), - ApiError::HandlePayment(value) => POS_ERROR_OFFSET + u32::from(value), - ApiError::User(value) => RESERVED_ERROR_MAX + 1 + u32::from(value), - } - } -} - -impl From for ApiError { - fn from(value: u32) -> ApiError { - match value { - 1 => ApiError::None, - 2 => ApiError::MissingArgument, - 3 => ApiError::InvalidArgument, - 4 => ApiError::Deserialize, - 5 => ApiError::Read, - 6 => ApiError::ValueNotFound, - 7 => ApiError::ContractNotFound, - 8 => ApiError::GetKey, - 9 => ApiError::UnexpectedKeyVariant, - 10 => ApiError::UnexpectedContractRefVariant, - 11 => ApiError::InvalidPurseName, - 12 => ApiError::InvalidPurse, - 13 => ApiError::UpgradeContractAtURef, - 14 => ApiError::Transfer, - 15 => ApiError::NoAccessRights, - 16 => ApiError::CLTypeMismatch, - 17 => ApiError::EarlyEndOfStream, - 18 => ApiError::Formatting, - 19 => ApiError::LeftOverBytes, - 20 => ApiError::OutOfMemory, - 21 => ApiError::MaxKeysLimit, - 22 => ApiError::DuplicateKey, - 23 => ApiError::PermissionDenied, - 24 => ApiError::MissingKey, - 25 => ApiError::ThresholdViolation, - 26 => ApiError::KeyManagementThreshold, - 27 => ApiError::DeploymentThreshold, - 28 => ApiError::InsufficientTotalWeight, - 29 => ApiError::InvalidSystemContract, - 30 => ApiError::PurseNotCreated, - 31 => ApiError::Unhandled, - 32 => ApiError::BufferTooSmall, - 33 => ApiError::HostBufferEmpty, - 34 => 
ApiError::HostBufferFull, - 35 => ApiError::AllocLayout, - 36 => ApiError::DictionaryItemKeyExceedsLength, - 37 => ApiError::InvalidDictionaryItemKey, - 38 => ApiError::MissingSystemContractHash, - 39 => ApiError::ExceededRecursionDepth, - 40 => ApiError::NonRepresentableSerialization, - USER_ERROR_MIN..=USER_ERROR_MAX => ApiError::User(value as u16), - HP_ERROR_MIN..=HP_ERROR_MAX => ApiError::HandlePayment(value as u8), - MINT_ERROR_MIN..=MINT_ERROR_MAX => ApiError::Mint(value as u8), - HEADER_ERROR_MIN..=HEADER_ERROR_MAX => ApiError::ContractHeader(value as u8), - AUCTION_ERROR_MIN..=AUCTION_ERROR_MAX => ApiError::AuctionError(value as u8), - _ => ApiError::Unhandled, - } - } -} - -impl Debug for ApiError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - ApiError::None => write!(f, "ApiError::None")?, - ApiError::MissingArgument => write!(f, "ApiError::MissingArgument")?, - ApiError::InvalidArgument => write!(f, "ApiError::InvalidArgument")?, - ApiError::Deserialize => write!(f, "ApiError::Deserialize")?, - ApiError::Read => write!(f, "ApiError::Read")?, - ApiError::ValueNotFound => write!(f, "ApiError::ValueNotFound")?, - ApiError::ContractNotFound => write!(f, "ApiError::ContractNotFound")?, - ApiError::GetKey => write!(f, "ApiError::GetKey")?, - ApiError::UnexpectedKeyVariant => write!(f, "ApiError::UnexpectedKeyVariant")?, - ApiError::UnexpectedContractRefVariant => { - write!(f, "ApiError::UnexpectedContractRefVariant")? - } - ApiError::InvalidPurseName => write!(f, "ApiError::InvalidPurseName")?, - ApiError::InvalidPurse => write!(f, "ApiError::InvalidPurse")?, - ApiError::UpgradeContractAtURef => write!(f, "ApiError::UpgradeContractAtURef")?, - ApiError::Transfer => write!(f, "ApiError::Transfer")?, - ApiError::NoAccessRights => write!(f, "ApiError::NoAccessRights")?, - ApiError::CLTypeMismatch => write!(f, "ApiError::CLTypeMismatch")?, - ApiError::EarlyEndOfStream => write!(f, "ApiError::EarlyEndOfStream")?, - ApiError::Formatting => write!(f, "ApiError::Formatting")?, - ApiError::LeftOverBytes => write!(f, "ApiError::LeftOverBytes")?, - ApiError::OutOfMemory => write!(f, "ApiError::OutOfMemory")?, - ApiError::MaxKeysLimit => write!(f, "ApiError::MaxKeysLimit")?, - ApiError::DuplicateKey => write!(f, "ApiError::DuplicateKey")?, - ApiError::PermissionDenied => write!(f, "ApiError::PermissionDenied")?, - ApiError::MissingKey => write!(f, "ApiError::MissingKey")?, - ApiError::ThresholdViolation => write!(f, "ApiError::ThresholdViolation")?, - ApiError::KeyManagementThreshold => write!(f, "ApiError::KeyManagementThreshold")?, - ApiError::DeploymentThreshold => write!(f, "ApiError::DeploymentThreshold")?, - ApiError::InsufficientTotalWeight => write!(f, "ApiError::InsufficientTotalWeight")?, - ApiError::InvalidSystemContract => write!(f, "ApiError::InvalidSystemContract")?, - ApiError::PurseNotCreated => write!(f, "ApiError::PurseNotCreated")?, - ApiError::Unhandled => write!(f, "ApiError::Unhandled")?, - ApiError::BufferTooSmall => write!(f, "ApiError::BufferTooSmall")?, - ApiError::HostBufferEmpty => write!(f, "ApiError::HostBufferEmpty")?, - ApiError::HostBufferFull => write!(f, "ApiError::HostBufferFull")?, - ApiError::AllocLayout => write!(f, "ApiError::AllocLayout")?, - ApiError::DictionaryItemKeyExceedsLength => { - write!(f, "ApiError::DictionaryItemKeyTooLarge")? 
- } - ApiError::InvalidDictionaryItemKey => write!(f, "ApiError::InvalidDictionaryItemKey")?, - ApiError::MissingSystemContractHash => write!(f, "ApiError::MissingContractHash")?, - ApiError::NonRepresentableSerialization => { - write!(f, "ApiError::NonRepresentableSerialization")? - } - ApiError::ExceededRecursionDepth => write!(f, "ApiError::ExceededRecursionDepth")?, - ApiError::AuctionError(value) => write!( - f, - "ApiError::AuctionError({:?})", - auction::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::ContractHeader(value) => write!( - f, - "ApiError::ContractHeader({:?})", - contracts::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::Mint(value) => write!( - f, - "ApiError::Mint({:?})", - mint::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::HandlePayment(value) => write!( - f, - "ApiError::HandlePayment({:?})", - handle_payment::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::User(value) => write!(f, "ApiError::User({})", value)?, - } - write!(f, " [{}]", u32::from(*self)) - } -} - -impl fmt::Display for ApiError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ApiError::User(value) => write!(f, "User error: {}", value), - ApiError::ContractHeader(value) => write!(f, "Contract header error: {}", value), - ApiError::Mint(value) => write!(f, "Mint error: {}", value), - ApiError::HandlePayment(value) => write!(f, "Handle Payment error: {}", value), - _ => ::fmt(self, f), - } - } -} - -// This function is not intended to be used by third party crates. -#[doc(hidden)] -pub fn i32_from(result: Result<(), T>) -> i32 -where - ApiError: From, -{ - match result { - Ok(()) => 0, - Err(error) => { - let api_error = ApiError::from(error); - u32::from(api_error) as i32 - } - } -} - -/// Converts an `i32` to a `Result<(), ApiError>`, where `0` represents `Ok(())`, and all other -/// inputs are mapped to `Err(ApiError::)`. The full list of mappings can be found in the -/// [docs for `ApiError`](ApiError#mappings). 
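The removed `i32_from`/`result_from` helpers below convert between host return codes and `Result`: `Ok(())` maps to `0`, and any error is its `u32` code reinterpreted as `i32`. A simplified illustration restricted to user error codes; the helper names are made up for this sketch:

```rust
// Hedged sketch of the i32 <-> Result round trip, user codes only.
fn to_host_code(result: Result<(), u16>) -> i32 {
    match result {
        Ok(()) => 0,
        // User codes are shifted past the reserved range (u16::MAX as u32 + 1).
        Err(user_code) => (u16::MAX as u32 + 1 + u32::from(user_code)) as i32,
    }
}

fn from_host_code(code: i32) -> Result<(), u32> {
    match code {
        0 => Ok(()),
        other => Err(other as u32),
    }
}

fn main() {
    assert_eq!(to_host_code(Ok(())), 0);
    assert_eq!(to_host_code(Err(0)), 65_536);
    assert_eq!(from_host_code(65_536), Err(65_536));
}
```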
-pub fn result_from(value: i32) -> Result<(), ApiError> { - match value { - 0 => Ok(()), - _ => Err(ApiError::from(value as u32)), - } -} - -#[cfg(test)] -mod tests { - use std::{i32, u16, u8}; - - use super::*; - - fn round_trip(result: Result<(), ApiError>) { - let code = i32_from(result); - assert_eq!(result, result_from(code)); - } - - #[test] - fn error_values() { - assert_eq!(65_024_u32, u32::from(ApiError::Mint(0))); // MINT_ERROR_OFFSET == 65,024 - assert_eq!(65_279_u32, u32::from(ApiError::Mint(u8::MAX))); - assert_eq!(65_280_u32, u32::from(ApiError::HandlePayment(0))); // POS_ERROR_OFFSET == 65,280 - assert_eq!(65_535_u32, u32::from(ApiError::HandlePayment(u8::MAX))); - assert_eq!(65_536_u32, u32::from(ApiError::User(0))); // u16::MAX + 1 - assert_eq!(131_071_u32, u32::from(ApiError::User(u16::MAX))); // 2 * u16::MAX + 1 - } - - #[test] - fn error_descriptions_getkey() { - assert_eq!("ApiError::GetKey [8]", &format!("{:?}", ApiError::GetKey)); - assert_eq!("ApiError::GetKey [8]", &format!("{}", ApiError::GetKey)); - } - - #[test] - fn error_descriptions_contract_header() { - assert_eq!( - "ApiError::ContractHeader(PreviouslyUsedVersion) [64769]", - &format!( - "{:?}", - ApiError::ContractHeader(contracts::Error::PreviouslyUsedVersion as u8) - ) - ); - assert_eq!( - "Contract header error: 0", - &format!("{}", ApiError::ContractHeader(0)) - ); - assert_eq!( - "Contract header error: 255", - &format!("{}", ApiError::ContractHeader(u8::MAX)) - ); - } - - #[test] - fn error_descriptions_mint() { - assert_eq!( - "ApiError::Mint(InsufficientFunds) [65024]", - &format!("{:?}", ApiError::Mint(0)) - ); - assert_eq!("Mint error: 0", &format!("{}", ApiError::Mint(0))); - assert_eq!("Mint error: 255", &format!("{}", ApiError::Mint(u8::MAX))); - } - - #[test] - fn error_descriptions_handle_payment() { - assert_eq!( - "ApiError::HandlePayment(NotBonded) [65280]", - &format!( - "{:?}", - ApiError::HandlePayment(handle_payment::Error::NotBonded as u8) - ) - ); - } - #[test] - fn error_descriptions_handle_payment_display() { - assert_eq!( - "Handle Payment error: 0", - &format!( - "{}", - ApiError::HandlePayment(handle_payment::Error::NotBonded as u8) - ) - ); - } - - #[test] - fn error_descriptions_user_errors() { - assert_eq!( - "ApiError::User(0) [65536]", - &format!("{:?}", ApiError::User(0)) - ); - - assert_eq!("User error: 0", &format!("{}", ApiError::User(0))); - assert_eq!( - "ApiError::User(65535) [131071]", - &format!("{:?}", ApiError::User(u16::MAX)) - ); - assert_eq!( - "User error: 65535", - &format!("{}", ApiError::User(u16::MAX)) - ); - } - - #[test] - fn error_edge_cases() { - assert_eq!(Err(ApiError::Unhandled), result_from(i32::MAX)); - assert_eq!( - Err(ApiError::ContractHeader(255)), - result_from(MINT_ERROR_OFFSET as i32 - 1) - ); - assert_eq!(Err(ApiError::Unhandled), result_from(-1)); - assert_eq!(Err(ApiError::Unhandled), result_from(i32::MIN)); - } - - #[test] - fn error_round_trips() { - round_trip(Ok(())); - round_trip(Err(ApiError::None)); - round_trip(Err(ApiError::MissingArgument)); - round_trip(Err(ApiError::InvalidArgument)); - round_trip(Err(ApiError::Deserialize)); - round_trip(Err(ApiError::Read)); - round_trip(Err(ApiError::ValueNotFound)); - round_trip(Err(ApiError::ContractNotFound)); - round_trip(Err(ApiError::GetKey)); - round_trip(Err(ApiError::UnexpectedKeyVariant)); - round_trip(Err(ApiError::UnexpectedContractRefVariant)); - round_trip(Err(ApiError::InvalidPurseName)); - round_trip(Err(ApiError::InvalidPurse)); - 
round_trip(Err(ApiError::UpgradeContractAtURef)); - round_trip(Err(ApiError::Transfer)); - round_trip(Err(ApiError::NoAccessRights)); - round_trip(Err(ApiError::CLTypeMismatch)); - round_trip(Err(ApiError::EarlyEndOfStream)); - round_trip(Err(ApiError::Formatting)); - round_trip(Err(ApiError::LeftOverBytes)); - round_trip(Err(ApiError::OutOfMemory)); - round_trip(Err(ApiError::MaxKeysLimit)); - round_trip(Err(ApiError::DuplicateKey)); - round_trip(Err(ApiError::PermissionDenied)); - round_trip(Err(ApiError::MissingKey)); - round_trip(Err(ApiError::ThresholdViolation)); - round_trip(Err(ApiError::KeyManagementThreshold)); - round_trip(Err(ApiError::DeploymentThreshold)); - round_trip(Err(ApiError::InsufficientTotalWeight)); - round_trip(Err(ApiError::InvalidSystemContract)); - round_trip(Err(ApiError::PurseNotCreated)); - round_trip(Err(ApiError::Unhandled)); - round_trip(Err(ApiError::BufferTooSmall)); - round_trip(Err(ApiError::HostBufferEmpty)); - round_trip(Err(ApiError::HostBufferFull)); - round_trip(Err(ApiError::AllocLayout)); - round_trip(Err(ApiError::NonRepresentableSerialization)); - round_trip(Err(ApiError::ContractHeader(0))); - round_trip(Err(ApiError::ContractHeader(u8::MAX))); - round_trip(Err(ApiError::Mint(0))); - round_trip(Err(ApiError::Mint(u8::MAX))); - round_trip(Err(ApiError::HandlePayment(0))); - round_trip(Err(ApiError::HandlePayment(u8::MAX))); - round_trip(Err(ApiError::User(0))); - round_trip(Err(ApiError::User(u16::MAX))); - round_trip(Err(ApiError::AuctionError(0))); - round_trip(Err(ApiError::AuctionError(u8::MAX))); - } -} diff --git a/casper_types/src/block_time.rs b/casper_types/src/block_time.rs deleted file mode 100644 index 4122f7ca..00000000 --- a/casper_types/src/block_time.rs +++ /dev/null @@ -1,47 +0,0 @@ -use alloc::vec::Vec; - -use crate::bytesrepr::{Error, FromBytes, ToBytes, U64_SERIALIZED_LENGTH}; - -/// The number of bytes in a serialized [`BlockTime`]. -pub const BLOCKTIME_SERIALIZED_LENGTH: usize = U64_SERIALIZED_LENGTH; - -/// A newtype wrapping a [`u64`] which represents the block time. -#[derive(Clone, Copy, Default, Debug, PartialEq, Eq, PartialOrd)] -pub struct BlockTime(u64); - -impl BlockTime { - /// Constructs a `BlockTime`. - pub fn new(value: u64) -> Self { - BlockTime(value) - } - - /// Saturating integer subtraction. Computes `self - other`, saturating at `0` instead of - /// overflowing. - #[must_use] - pub fn saturating_sub(self, other: BlockTime) -> Self { - BlockTime(self.0.saturating_sub(other.0)) - } -} - -impl From for u64 { - fn from(blocktime: BlockTime) -> Self { - blocktime.0 - } -} - -impl ToBytes for BlockTime { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - BLOCKTIME_SERIALIZED_LENGTH - } -} - -impl FromBytes for BlockTime { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (time, rem) = FromBytes::from_bytes(bytes)?; - Ok((BlockTime::new(time), rem)) - } -} diff --git a/casper_types/src/bytesrepr.rs b/casper_types/src/bytesrepr.rs deleted file mode 100644 index 136dd19a..00000000 --- a/casper_types/src/bytesrepr.rs +++ /dev/null @@ -1,1594 +0,0 @@ -//! Contains serialization and deserialization code for types used throughout the system. 
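The deleted `BlockTime` type is a thin `u64` newtype whose only non-trivial behaviour is saturating subtraction and 8-byte little-endian serialization. A standalone sketch mirroring that shape, not the crate's actual type:

```rust
// Minimal BlockTime-style newtype: saturating subtraction plus LE byte form.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct BlockTime(u64);

impl BlockTime {
    fn saturating_sub(self, other: BlockTime) -> Self {
        BlockTime(self.0.saturating_sub(other.0)) // never underflows below zero
    }
    fn to_le_bytes(self) -> [u8; 8] {
        self.0.to_le_bytes()
    }
}

fn main() {
    let earlier = BlockTime(100);
    let later = BlockTime(250);
    assert_eq!(later.saturating_sub(earlier), BlockTime(150));
    assert_eq!(earlier.saturating_sub(later), BlockTime(0)); // saturates, no panic
    assert_eq!(BlockTime(1).to_le_bytes(), [1, 0, 0, 0, 0, 0, 0, 0]);
}
```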
-mod bytes; - -use alloc::{ - alloc::{alloc, Layout}, - collections::{BTreeMap, BTreeSet, VecDeque}, - str, - string::String, - vec, - vec::Vec, -}; -#[cfg(debug_assertions)] -use core::any; -use core::{ - convert::TryInto, - fmt::{self, Display, Formatter}, - mem, - ptr::NonNull, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num_integer::Integer; -use num_rational::Ratio; -use serde::{Deserialize, Serialize}; - -pub use bytes::Bytes; - -/// The number of bytes in a serialized `()`. -pub const UNIT_SERIALIZED_LENGTH: usize = 0; -/// The number of bytes in a serialized `bool`. -pub const BOOL_SERIALIZED_LENGTH: usize = 1; -/// The number of bytes in a serialized `i32`. -pub const I32_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `i64`. -pub const I64_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u8`. -pub const U8_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u16`. -pub const U16_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u32`. -pub const U32_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u64`. -pub const U64_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized [`U128`](crate::U128). -pub const U128_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized [`U256`](crate::U256). -pub const U256_SERIALIZED_LENGTH: usize = U128_SERIALIZED_LENGTH * 2; -/// The number of bytes in a serialized [`U512`](crate::U512). -pub const U512_SERIALIZED_LENGTH: usize = U256_SERIALIZED_LENGTH * 2; -/// The tag representing a `None` value. -pub const OPTION_NONE_TAG: u8 = 0; -/// The tag representing a `Some` value. -pub const OPTION_SOME_TAG: u8 = 1; -/// The tag representing an `Err` value. -pub const RESULT_ERR_TAG: u8 = 0; -/// The tag representing an `Ok` value. -pub const RESULT_OK_TAG: u8 = 1; - -/// A type which can be serialized to a `Vec`. -pub trait ToBytes { - /// Serializes `&self` to a `Vec`. - fn to_bytes(&self) -> Result, Error>; - /// Consumes `self` and serializes to a `Vec`. - fn into_bytes(self) -> Result, Error> - where - Self: Sized, - { - self.to_bytes() - } - /// Returns the length of the `Vec` which would be returned from a successful call to - /// `to_bytes()` or `into_bytes()`. The data is not actually serialized, so this call is - /// relatively cheap. - fn serialized_length(&self) -> usize; - - /// Writes `&self` into a mutable `writer`. - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend(self.to_bytes()?); - Ok(()) - } -} - -/// A type which can be deserialized from a `Vec`. -pub trait FromBytes: Sized { - /// Deserializes the slice into `Self`. - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error>; - - /// Deserializes the `Vec` into `Self`. - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - Self::from_bytes(bytes.as_slice()).map(|(x, remainder)| (x, Vec::from(remainder))) - } -} - -/// Returns a `Vec` initialized with sufficient capacity to hold `to_be_serialized` after -/// serialization. -pub fn unchecked_allocate_buffer(to_be_serialized: &T) -> Vec { - let serialized_length = to_be_serialized.serialized_length(); - Vec::with_capacity(serialized_length) -} - -/// Returns a `Vec` initialized with sufficient capacity to hold `to_be_serialized` after -/// serialization, or an error if the capacity would exceed `u32::max_value()`. 
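The `ToBytes`/`FromBytes` pair above carries an implicit contract: `serialized_length` must report the exact length of the bytes `to_bytes` would produce, without actually serializing, so buffers can be pre-allocated cheaply. A minimal sketch of that contract with placeholder trait and type names:

```rust
// Illustrative ToBytes-style contract: length must match the produced bytes.
trait ToBytesLike {
    fn to_bytes(&self) -> Vec<u8>;
    fn serialized_length(&self) -> usize;
}

#[derive(Debug, PartialEq)]
struct Pair(u32, u8);

impl ToBytesLike for Pair {
    fn to_bytes(&self) -> Vec<u8> {
        let mut out = Vec::with_capacity(self.serialized_length());
        out.extend_from_slice(&self.0.to_le_bytes()); // u32: 4 LE bytes
        out.push(self.1);                             // u8: 1 byte
        out
    }
    fn serialized_length(&self) -> usize {
        4 + 1 // cheap: no serialization performed
    }
}

fn main() {
    let pair = Pair(7, 9);
    let bytes = pair.to_bytes();
    assert_eq!(bytes.len(), pair.serialized_length());
    assert_eq!(bytes, vec![7, 0, 0, 0, 9]);
}
```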
-pub fn allocate_buffer(to_be_serialized: &T) -> Result, Error> { - let serialized_length = to_be_serialized.serialized_length(); - if serialized_length > u32::max_value() as usize { - return Err(Error::OutOfMemory); - } - Ok(Vec::with_capacity(serialized_length)) -} - -/// Serialization and deserialization errors. -#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Early end of stream while deserializing. - EarlyEndOfStream = 0, - /// Formatting error while deserializing. - Formatting, - /// Not all input bytes were consumed in [`deserialize`]. - LeftOverBytes, - /// Out of memory error. - OutOfMemory, - /// No serialized representation is available for a value. - NotRepresentable, - /// Exceeded a recursion depth limit. - ExceededRecursionDepth, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::EarlyEndOfStream => { - formatter.write_str("Deserialization error: early end of stream") - } - Error::Formatting => formatter.write_str("Deserialization error: formatting"), - Error::LeftOverBytes => formatter.write_str("Deserialization error: left-over bytes"), - Error::OutOfMemory => formatter.write_str("Serialization error: out of memory"), - Error::NotRepresentable => { - formatter.write_str("Serialization error: value is not representable.") - } - Error::ExceededRecursionDepth => formatter.write_str("exceeded recursion depth"), - } - } -} - -/// Deserializes `bytes` into an instance of `T`. -/// -/// Returns an error if the bytes cannot be deserialized into `T` or if not all of the input bytes -/// are consumed in the operation. -pub fn deserialize(bytes: Vec) -> Result { - let (t, remainder) = T::from_bytes(&bytes)?; - if remainder.is_empty() { - Ok(t) - } else { - Err(Error::LeftOverBytes) - } -} - -/// Deserializes a slice of bytes into an instance of `T`. -/// -/// Returns an error if the bytes cannot be deserialized into `T` or if not all of the input bytes -/// are consumed in the operation. -pub fn deserialize_from_slice, O: FromBytes>(bytes: I) -> Result { - let (t, remainder) = O::from_bytes(bytes.as_ref())?; - if remainder.is_empty() { - Ok(t) - } else { - Err(Error::LeftOverBytes) - } -} - -/// Serializes `t` into a `Vec`. -pub fn serialize(t: impl ToBytes) -> Result, Error> { - t.into_bytes() -} - -/// Safely splits the slice at the given point. 
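The removed `deserialize`/`deserialize_from_slice` helpers enforce a consume-everything rule: the parser returns the decoded value plus the unconsumed remainder, and any leftover bytes are an error rather than being silently ignored. A simplified illustration for a single `u32`, with string errors standing in for the real `Error` enum:

```rust
// Sketch of the "no left-over bytes" deserialization rule.
use std::convert::TryInto;

fn parse_u32(bytes: &[u8]) -> Result<(u32, &[u8]), &'static str> {
    if bytes.len() < 4 {
        return Err("early end of stream");
    }
    let (head, rest) = bytes.split_at(4);
    Ok((u32::from_le_bytes(head.try_into().unwrap()), rest))
}

fn deserialize_exact(bytes: &[u8]) -> Result<u32, &'static str> {
    let (value, remainder) = parse_u32(bytes)?;
    if remainder.is_empty() {
        Ok(value)
    } else {
        Err("left-over bytes") // extra trailing data is rejected
    }
}

fn main() {
    assert_eq!(deserialize_exact(&[1, 0, 0, 0]), Ok(1));
    assert_eq!(deserialize_exact(&[1, 0, 0, 0, 9]), Err("left-over bytes"));
    assert_eq!(deserialize_exact(&[1, 0]), Err("early end of stream"));
}
```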
-pub(crate) fn safe_split_at(bytes: &[u8], n: usize) -> Result<(&[u8], &[u8]), Error> { - if n > bytes.len() { - Err(Error::EarlyEndOfStream) - } else { - Ok(bytes.split_at(n)) - } -} - -impl ToBytes for () { - fn to_bytes(&self) -> Result, Error> { - Ok(Vec::new()) - } - - fn serialized_length(&self) -> usize { - UNIT_SERIALIZED_LENGTH - } -} - -impl FromBytes for () { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - Ok(((), bytes)) - } -} - -impl ToBytes for bool { - fn to_bytes(&self) -> Result, Error> { - u8::from(*self).to_bytes() - } - - fn serialized_length(&self) -> usize { - BOOL_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.push(*self as u8); - Ok(()) - } -} - -impl FromBytes for bool { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - match bytes.split_first() { - None => Err(Error::EarlyEndOfStream), - Some((byte, rem)) => match byte { - 1 => Ok((true, rem)), - 0 => Ok((false, rem)), - _ => Err(Error::Formatting), - }, - } - } -} - -impl ToBytes for u8 { - fn to_bytes(&self) -> Result, Error> { - Ok(vec![*self]) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.push(*self); - Ok(()) - } -} - -impl FromBytes for u8 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - match bytes.split_first() { - None => Err(Error::EarlyEndOfStream), - Some((byte, rem)) => Ok((*byte, rem)), - } - } -} - -impl ToBytes for i32 { - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_le_bytes().to_vec()) - } - - fn serialized_length(&self) -> usize { - I32_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(&self.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for i32 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [0u8; I32_SERIALIZED_LENGTH]; - let (bytes, remainder) = safe_split_at(bytes, I32_SERIALIZED_LENGTH)?; - result.copy_from_slice(bytes); - Ok((::from_le_bytes(result), remainder)) - } -} - -impl ToBytes for i64 { - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_le_bytes().to_vec()) - } - - fn serialized_length(&self) -> usize { - I64_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(&self.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for i64 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [0u8; I64_SERIALIZED_LENGTH]; - let (bytes, remainder) = safe_split_at(bytes, I64_SERIALIZED_LENGTH)?; - result.copy_from_slice(bytes); - Ok((::from_le_bytes(result), remainder)) - } -} - -impl ToBytes for u16 { - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_le_bytes().to_vec()) - } - - fn serialized_length(&self) -> usize { - U16_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(&self.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for u16 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [0u8; U16_SERIALIZED_LENGTH]; - let (bytes, remainder) = safe_split_at(bytes, U16_SERIALIZED_LENGTH)?; - result.copy_from_slice(bytes); - Ok((::from_le_bytes(result), remainder)) - } -} - -impl ToBytes for u32 { - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_le_bytes().to_vec()) - } - - fn serialized_length(&self) -> usize { - U32_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> 
Result<(), Error> { - writer.extend_from_slice(&self.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for u32 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [0u8; U32_SERIALIZED_LENGTH]; - let (bytes, remainder) = safe_split_at(bytes, U32_SERIALIZED_LENGTH)?; - result.copy_from_slice(bytes); - Ok((::from_le_bytes(result), remainder)) - } -} - -impl ToBytes for u64 { - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_le_bytes().to_vec()) - } - - fn serialized_length(&self) -> usize { - U64_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(&self.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for u64 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [0u8; U64_SERIALIZED_LENGTH]; - let (bytes, remainder) = safe_split_at(bytes, U64_SERIALIZED_LENGTH)?; - result.copy_from_slice(bytes); - Ok((::from_le_bytes(result), remainder)) - } -} - -impl ToBytes for String { - fn to_bytes(&self) -> Result, Error> { - let bytes = self.as_bytes(); - u8_slice_to_bytes(bytes) - } - - fn serialized_length(&self) -> usize { - u8_slice_serialized_length(self.as_bytes()) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - write_u8_slice(self.as_bytes(), writer)?; - Ok(()) - } -} - -impl FromBytes for String { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (size, remainder) = u32::from_bytes(bytes)?; - let (str_bytes, remainder) = safe_split_at(remainder, size as usize)?; - let result = String::from_utf8(str_bytes.to_vec()).map_err(|_| Error::Formatting)?; - Ok((result, remainder)) - } -} - -fn ensure_efficient_serialization() { - #[cfg(debug_assertions)] - debug_assert_ne!( - any::type_name::(), - any::type_name::(), - "You should use Bytes newtype wrapper for efficiency" - ); -} - -fn iterator_serialized_length<'a, T: 'a + ToBytes>(ts: impl Iterator) -> usize { - U32_SERIALIZED_LENGTH + ts.map(ToBytes::serialized_length).sum::() -} - -impl ToBytes for Vec { - fn to_bytes(&self) -> Result, Error> { - ensure_efficient_serialization::(); - - let mut result = try_vec_with_capacity(self.serialized_length())?; - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut length_32.to_bytes()?); - - for item in self.iter() { - result.append(&mut item.to_bytes()?); - } - - Ok(result) - } - - fn into_bytes(self) -> Result, Error> { - ensure_efficient_serialization::(); - - let mut result = allocate_buffer(&self)?; - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut length_32.to_bytes()?); - - for item in self { - result.append(&mut item.into_bytes()?); - } - - Ok(result) - } - - fn serialized_length(&self) -> usize { - iterator_serialized_length(self.iter()) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for item in self.iter() { - item.write_bytes(writer)?; - } - Ok(()) - } -} - -// TODO Replace `try_vec_with_capacity` with `Vec::try_reserve_exact` once it's in stable. 
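The `String` and `Vec<T>` impls above share one encoding scheme: a `u32` little-endian count (the byte length for strings, the element count for vectors) followed by the payload. A hedged sketch of that scheme for strings, with hypothetical helper names:

```rust
// Length-prefixed string encoding: u32 LE byte count, then raw UTF-8 payload.
use std::convert::TryInto;

fn encode_str(s: &str) -> Vec<u8> {
    let mut out = Vec::with_capacity(4 + s.len());
    out.extend_from_slice(&(s.len() as u32).to_le_bytes()); // byte count prefix
    out.extend_from_slice(s.as_bytes());                    // raw UTF-8 payload
    out
}

fn decode_str(bytes: &[u8]) -> Option<(String, &[u8])> {
    if bytes.len() < 4 {
        return None;
    }
    let (len_bytes, rest) = bytes.split_at(4);
    let len = u32::from_le_bytes(len_bytes.try_into().ok()?) as usize;
    if rest.len() < len {
        return None;
    }
    let (payload, remainder) = rest.split_at(len);
    Some((String::from_utf8(payload.to_vec()).ok()?, remainder))
}

fn main() {
    let bytes = encode_str("abc");
    assert_eq!(bytes, vec![3, 0, 0, 0, b'a', b'b', b'c']);
    let (decoded, remainder) = decode_str(&bytes).unwrap();
    assert_eq!(decoded, "abc");
    assert!(remainder.is_empty());
}
```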
-fn try_vec_with_capacity(capacity: usize) -> Result, Error> { - // see https://doc.rust-lang.org/src/alloc/raw_vec.rs.html#75-98 - let elem_size = mem::size_of::(); - let alloc_size = capacity.checked_mul(elem_size).ok_or(Error::OutOfMemory)?; - - let ptr = if alloc_size == 0 { - NonNull::::dangling() - } else { - let align = mem::align_of::(); - let layout = Layout::from_size_align(alloc_size, align).map_err(|_| Error::OutOfMemory)?; - let raw_ptr = unsafe { alloc(layout) }; - let non_null_ptr = NonNull::::new(raw_ptr).ok_or(Error::OutOfMemory)?; - non_null_ptr.cast() - }; - unsafe { Ok(Vec::from_raw_parts(ptr.as_ptr(), 0, capacity)) } -} - -fn vec_from_vec(bytes: Vec) -> Result<(Vec, Vec), Error> { - ensure_efficient_serialization::(); - - Vec::::from_bytes(bytes.as_slice()).map(|(x, remainder)| (x, Vec::from(remainder))) -} - -impl FromBytes for Vec { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - ensure_efficient_serialization::(); - - let (count, mut stream) = u32::from_bytes(bytes)?; - - let mut result = try_vec_with_capacity(count as usize)?; - for _ in 0..count { - let (value, remainder) = T::from_bytes(stream)?; - result.push(value); - stream = remainder; - } - - Ok((result, stream)) - } - - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - vec_from_vec(bytes) - } -} - -impl ToBytes for VecDeque { - fn to_bytes(&self) -> Result, Error> { - let (slice1, slice2) = self.as_slices(); - let mut result = allocate_buffer(self)?; - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut length_32.to_bytes()?); - for item in slice1.iter().chain(slice2.iter()) { - result.append(&mut item.to_bytes()?); - } - Ok(result) - } - - fn into_bytes(self) -> Result, Error> { - let vec: Vec = self.into(); - vec.to_bytes() - } - - fn serialized_length(&self) -> usize { - let (slice1, slice2) = self.as_slices(); - iterator_serialized_length(slice1.iter().chain(slice2.iter())) - } -} - -impl FromBytes for VecDeque { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (vec, bytes) = Vec::from_bytes(bytes)?; - Ok((VecDeque::from(vec), bytes)) - } - - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - let (vec, bytes) = vec_from_vec(bytes)?; - Ok((VecDeque::from(vec), bytes)) - } -} - -impl ToBytes for [u8; COUNT] { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_vec()) - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - COUNT - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(self); - Ok(()) - } -} - -impl FromBytes for [u8; COUNT] { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = safe_split_at(bytes, COUNT)?; - // SAFETY: safe_split_at makes sure `bytes` is exactly `COUNT` bytes. 
- let ptr = bytes.as_ptr() as *const [u8; COUNT]; - let result = unsafe { *ptr }; - Ok((result, rem)) - } -} - -impl ToBytes for BTreeSet { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - - let num_keys: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut num_keys.to_bytes()?); - - for value in self.iter() { - result.append(&mut value.to_bytes()?); - } - - Ok(result) - } - - fn serialized_length(&self) -> usize { - U32_SERIALIZED_LENGTH + self.iter().map(|v| v.serialized_length()).sum::() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for value in self.iter() { - value.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for BTreeSet { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_keys, mut stream) = u32::from_bytes(bytes)?; - let mut result = BTreeSet::new(); - for _ in 0..num_keys { - let (v, rem) = V::from_bytes(stream)?; - result.insert(v); - stream = rem; - } - Ok((result, stream)) - } -} - -impl ToBytes for BTreeMap -where - K: ToBytes, - V: ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - - let num_keys: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut num_keys.to_bytes()?); - - for (key, value) in self.iter() { - result.append(&mut key.to_bytes()?); - result.append(&mut value.to_bytes()?); - } - - Ok(result) - } - - fn serialized_length(&self) -> usize { - U32_SERIALIZED_LENGTH - + self - .iter() - .map(|(key, value)| key.serialized_length() + value.serialized_length()) - .sum::() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for (key, value) in self.iter() { - key.write_bytes(writer)?; - value.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for BTreeMap -where - K: FromBytes + Ord, - V: FromBytes, -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_keys, mut stream) = u32::from_bytes(bytes)?; - let mut result = BTreeMap::new(); - for _ in 0..num_keys { - let (k, rem) = K::from_bytes(stream)?; - let (v, rem) = V::from_bytes(rem)?; - result.insert(k, v); - stream = rem; - } - Ok((result, stream)) - } -} - -impl ToBytes for Option { - fn to_bytes(&self) -> Result, Error> { - match self { - None => Ok(vec![OPTION_NONE_TAG]), - Some(v) => { - let mut result = allocate_buffer(self)?; - result.push(OPTION_SOME_TAG); - - let mut value = v.to_bytes()?; - result.append(&mut value); - - Ok(result) - } - } - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - Some(v) => v.serialized_length(), - None => 0, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - match self { - None => writer.push(OPTION_NONE_TAG), - Some(v) => { - writer.push(OPTION_SOME_TAG); - v.write_bytes(writer)?; - } - }; - Ok(()) - } -} - -impl FromBytes for Option { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (tag, rem) = u8::from_bytes(bytes)?; - match tag { - OPTION_NONE_TAG => Ok((None, rem)), - OPTION_SOME_TAG => { - let (t, rem) = T::from_bytes(rem)?; - Ok((Some(t), rem)) - } - _ => Err(Error::Formatting), - } - } -} - -impl ToBytes for Result { - fn to_bytes(&self) -> Result, 
Error> { - let mut result = allocate_buffer(self)?; - let (variant, mut value) = match self { - Err(error) => (RESULT_ERR_TAG, error.to_bytes()?), - Ok(result) => (RESULT_OK_TAG, result.to_bytes()?), - }; - result.push(variant); - result.append(&mut value); - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - Ok(ok) => ok.serialized_length(), - Err(error) => error.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - match self { - Err(error) => { - writer.push(RESULT_ERR_TAG); - error.write_bytes(writer)?; - } - Ok(result) => { - writer.push(RESULT_OK_TAG); - result.write_bytes(writer)?; - } - }; - Ok(()) - } -} - -impl FromBytes for Result { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (variant, rem) = u8::from_bytes(bytes)?; - match variant { - RESULT_ERR_TAG => { - let (value, rem) = E::from_bytes(rem)?; - Ok((Err(value), rem)) - } - RESULT_OK_TAG => { - let (value, rem) = T::from_bytes(rem)?; - Ok((Ok(value), rem)) - } - _ => Err(Error::Formatting), - } - } -} - -impl ToBytes for (T1,) { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for (T1,) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - Ok(((t1,), remainder)) - } -} - -impl ToBytes for (T1, T2) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() - } -} - -impl FromBytes for (T1, T2) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - Ok(((t1, t2), remainder)) - } -} - -impl ToBytes for (T1, T2, T3) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() + self.2.serialized_length() - } -} - -impl FromBytes for (T1, T2, T3) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - Ok(((t1, t2, t3), remainder)) - } -} - -impl ToBytes for (T1, T2, T3, T4) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - } -} - -impl FromBytes for (T1, T2, T3, T4) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4), remainder)) - } -} - -impl ToBytes - for (T1, T2, 
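Option and Result are both encoded as a single tag byte followed by the payload of the active variant, so the serialized length is always one byte plus the inner value's length. A brief sketch under the same casper-types assumption:

use casper_types::bytesrepr::{FromBytes, ToBytes};

fn main() {
    // One tag byte plus the 4-byte payload of the `Some` value.
    assert_eq!(Some(7u32).serialized_length(), 1 + 4);
    // `None` carries no payload, so only the tag byte remains.
    assert_eq!(None::<u32>.serialized_length(), 1);

    // `Result` follows the same pattern and round-trips cleanly.
    let ok: Result<u32, String> = Ok(7);
    let bytes = ok.to_bytes().expect("should serialize");
    assert_eq!(bytes.len(), 1 + 4);
    let (decoded, remainder) = <Result<u32, String>>::from_bytes(&bytes).expect("should deserialize");
    assert_eq!(decoded, ok);
    assert!(remainder.is_empty());
}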
T3, T4, T5) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - } -} - -impl FromBytes - for (T1, T2, T3, T4, T5) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5), remainder)) - } -} - -impl ToBytes - for (T1, T2, T3, T4, T5, T6) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - } -} - -impl - FromBytes for (T1, T2, T3, T4, T5, T6) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6), remainder)) - } -} - -impl - ToBytes for (T1, T2, T3, T4, T5, T6, T7) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: 
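Tuples carry no prefix or tag at all; the fields are simply concatenated in order. A quick sketch, assuming casper-types as a dependency:

use casper_types::bytesrepr::{FromBytes, ToBytes};

fn main() {
    let pair = (1u8, 2u32);

    // No prefix or tag: the u8 is followed directly by the little-endian u32.
    let bytes = pair.to_bytes().expect("should serialize");
    assert_eq!(bytes, vec![1, 2, 0, 0, 0]);
    assert_eq!(pair.serialized_length(), 1 + 4);

    // The fields are read back in the same order.
    let (decoded, remainder) = <(u8, u32)>::from_bytes(&bytes).expect("should deserialize");
    assert_eq!(decoded, pair);
    assert!(remainder.is_empty());
}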
ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: ToBytes, - T9: ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - result.append(&mut self.8.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - + self.8.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - T9: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - let (t9, remainder) = T9::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8, t9), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: ToBytes, - T9: ToBytes, - T10: ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8, 
T9, T10) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - result.append(&mut self.8.to_bytes()?); - result.append(&mut self.9.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - + self.8.serialized_length() - + self.9.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - T9: FromBytes, - T10: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - let (t9, remainder) = T9::from_bytes(remainder)?; - let (t10, remainder) = T10::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8, t9, t10), remainder)) - } -} - -impl ToBytes for str { - #[inline] - fn to_bytes(&self) -> Result, Error> { - u8_slice_to_bytes(self.as_bytes()) - } - - #[inline] - fn serialized_length(&self) -> usize { - u8_slice_serialized_length(self.as_bytes()) - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - write_u8_slice(self.as_bytes(), writer)?; - Ok(()) - } -} - -impl ToBytes for &str { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - (*self).to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - (*self).serialized_length() - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - write_u8_slice(self.as_bytes(), writer)?; - Ok(()) - } -} - -impl ToBytes for &T -where - T: ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - (*self).to_bytes() - } - - fn serialized_length(&self) -> usize { - (*self).serialized_length() - } -} - -impl ToBytes for Ratio -where - T: Clone + Integer + ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - if self.denom().is_zero() { - return Err(Error::Formatting); - } - (self.numer().clone(), self.denom().clone()).into_bytes() - } - - fn serialized_length(&self) -> usize { - (self.numer().clone(), self.denom().clone()).serialized_length() - } -} - -impl FromBytes for Ratio -where - T: Clone + FromBytes + Integer, -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let ((numer, denom), rem): ((T, T), &[u8]) = FromBytes::from_bytes(bytes)?; - if denom.is_zero() { - return Err(Error::Formatting); - } - Ok((Ratio::new(numer, denom), rem)) - } -} - -/// Serializes a slice of bytes with a length prefix. -/// -/// This function is serializing a slice of bytes with an addition of a 4 byte length prefix. 
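String slices reuse the length-prefixed byte-slice layout: a u32 length followed by the UTF-8 bytes. For example, assuming casper-types as a dependency:

use casper_types::bytesrepr::ToBytes;

fn main() {
    // A 4-byte little-endian length, then the UTF-8 payload.
    let bytes = "abc".to_bytes().expect("should serialize");
    let mut expected = vec![3u8, 0, 0, 0];
    expected.extend(b"abc");
    assert_eq!(bytes, expected);
    assert_eq!("abc".serialized_length(), 4 + 3);
}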
-/// -/// For safety you should prefer to use [`vec_u8_to_bytes`]. For efficiency reasons you should also -/// avoid using serializing Vec. -fn u8_slice_to_bytes(bytes: &[u8]) -> Result, Error> { - let serialized_length = u8_slice_serialized_length(bytes); - let mut vec = try_vec_with_capacity(serialized_length)?; - let length_prefix: u32 = bytes - .len() - .try_into() - .map_err(|_| Error::NotRepresentable)?; - let length_prefix_bytes = length_prefix.to_le_bytes(); - vec.extend_from_slice(&length_prefix_bytes); - vec.extend_from_slice(bytes); - Ok(vec) -} - -fn write_u8_slice(bytes: &[u8], writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = bytes - .len() - .try_into() - .map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - writer.extend_from_slice(bytes); - Ok(()) -} - -/// Serializes a vector of bytes with a length prefix. -/// -/// For efficiency you should avoid serializing Vec. -#[allow(clippy::ptr_arg)] -#[inline] -pub(crate) fn vec_u8_to_bytes(vec: &Vec) -> Result, Error> { - u8_slice_to_bytes(vec.as_slice()) -} - -/// Returns serialized length of serialized slice of bytes. -/// -/// This function adds a length prefix in the beginning. -#[inline(always)] -fn u8_slice_serialized_length(bytes: &[u8]) -> usize { - U32_SERIALIZED_LENGTH + bytes.len() -} - -#[allow(clippy::ptr_arg)] -#[inline] -pub(crate) fn vec_u8_serialized_length(vec: &Vec) -> usize { - u8_slice_serialized_length(vec.as_slice()) -} - -// This test helper is not intended to be used by third party crates. -#[doc(hidden)] -/// Returns `true` if a we can serialize and then deserialize a value -pub fn test_serialization_roundtrip(t: &T) -where - T: alloc::fmt::Debug + ToBytes + FromBytes + PartialEq, -{ - let serialized = ToBytes::to_bytes(t).expect("Unable to serialize data"); - assert_eq!( - serialized.len(), - t.serialized_length(), - "\nLength of serialized data: {},\nserialized_length() yielded: {},\nserialized data: {:?}, t is {:?}", - serialized.len(), - t.serialized_length(), - serialized, - t - ); - let mut written_bytes = vec![]; - t.write_bytes(&mut written_bytes) - .expect("Unable to serialize data via write_bytes"); - assert_eq!(serialized, written_bytes); - - let deserialized_from_slice = - deserialize_from_slice(&serialized).expect("Unable to deserialize data"); - // assert!(*t == deserialized); - assert_eq!(*t, deserialized_from_slice); - - let deserialized = deserialize::(serialized).expect("Unable to deserialize data"); - assert_eq!(*t, deserialized); -} -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn should_not_serialize_zero_denominator() { - let malicious = Ratio::new_raw(1, 0); - assert_eq!(malicious.to_bytes().unwrap_err(), Error::Formatting); - } - - #[test] - fn should_not_deserialize_zero_denominator() { - let malicious_bytes = (1u64, 0u64).to_bytes().unwrap(); - let result: Result, Error> = super::deserialize(malicious_bytes); - assert_eq!(result.unwrap_err(), Error::Formatting); - } - - #[test] - fn should_have_generic_tobytes_impl_for_borrowed_types() { - struct NonCopyable; - - impl ToBytes for NonCopyable { - fn to_bytes(&self) -> Result, Error> { - Ok(vec![1, 2, 3]) - } - - fn serialized_length(&self) -> usize { - 3 - } - } - - let noncopyable: &NonCopyable = &NonCopyable; - - assert_eq!(noncopyable.to_bytes().unwrap(), vec![1, 2, 3]); - assert_eq!(noncopyable.serialized_length(), 3); - assert_eq!(noncopyable.into_bytes().unwrap(), vec![1, 2, 3]); - } - - #[cfg(debug_assertions)] - #[test] - #[should_panic(expected = "You 
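The roundtrip helper checks that to_bytes, write_bytes and serialized_length agree and that both deserialization paths restore the value. A sketch of how an in-crate test might invoke it; the module and test names here are illustrative only:

#[cfg(test)]
mod roundtrip_examples {
    use std::collections::VecDeque;

    use crate::bytesrepr;

    #[test]
    fn collections_roundtrip() {
        // Panics if any of the serialization paths disagree for the given value.
        bytesrepr::test_serialization_roundtrip(&VecDeque::from(vec![1u32, 2, 3]));
        bytesrepr::test_serialization_roundtrip(&Some(String::from("hello")));
    }
}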
should use Bytes newtype wrapper for efficiency")] - fn should_fail_to_serialize_slice_of_u8() { - let bytes = b"0123456789".to_vec(); - bytes.to_bytes().unwrap(); - } -} - -#[cfg(test)] -mod proptests { - use std::collections::VecDeque; - - use proptest::{collection::vec, prelude::*}; - - use crate::{ - bytesrepr::{self, bytes::gens::bytes_arb, ToBytes}, - gens::*, - }; - - proptest! { - #[test] - fn test_bool(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u8(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u16(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u32(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_i32(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u64(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_i64(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u8_slice_32(s in u8_slice_32()) { - bytesrepr::test_serialization_roundtrip(&s); - } - - #[test] - fn test_vec_u8(u in bytes_arb(1..100)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_vec_i32(u in vec(any::(), 1..100)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_vecdeque_i32((front, back) in (vec(any::(), 1..100), vec(any::(), 1..100))) { - let mut vec_deque = VecDeque::new(); - for f in front { - vec_deque.push_front(f); - } - for f in back { - vec_deque.push_back(f); - } - bytesrepr::test_serialization_roundtrip(&vec_deque); - } - - #[test] - fn test_vec_vec_u8(u in vec(bytes_arb(1..100), 10)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_uref_map(m in named_keys_arb(20)) { - bytesrepr::test_serialization_roundtrip(&m); - } - - #[test] - fn test_array_u8_32(arr in any::<[u8; 32]>()) { - bytesrepr::test_serialization_roundtrip(&arr); - } - - #[test] - fn test_string(s in "\\PC*") { - bytesrepr::test_serialization_roundtrip(&s); - } - - #[test] - fn test_str(s in "\\PC*") { - let not_a_string_object = s.as_str(); - not_a_string_object.to_bytes().expect("should serialize a str"); - } - - #[test] - fn test_option(o in proptest::option::of(key_arb())) { - bytesrepr::test_serialization_roundtrip(&o); - } - - #[test] - fn test_unit(unit in Just(())) { - bytesrepr::test_serialization_roundtrip(&unit); - } - - #[test] - fn test_u128_serialization(u in u128_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u256_serialization(u in u256_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u512_serialization(u in u512_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_key_serialization(key in key_arb()) { - bytesrepr::test_serialization_roundtrip(&key); - } - - #[test] - fn test_cl_value_serialization(cl_value in cl_value_arb()) { - bytesrepr::test_serialization_roundtrip(&cl_value); - } - - #[test] - fn test_access_rights(access_right in access_rights_arb()) { - bytesrepr::test_serialization_roundtrip(&access_right); - } - - #[test] - fn test_uref(uref in uref_arb()) { - bytesrepr::test_serialization_roundtrip(&uref); - } - - #[test] - fn test_account_hash(pk in account_hash_arb()) { - bytesrepr::test_serialization_roundtrip(&pk); - } - - #[test] - fn test_result(result in result_arb()) { - bytesrepr::test_serialization_roundtrip(&result); - } - - #[test] - fn 
test_phase_serialization(phase in phase_arb()) { - bytesrepr::test_serialization_roundtrip(&phase); - } - - #[test] - fn test_protocol_version(protocol_version in protocol_version_arb()) { - bytesrepr::test_serialization_roundtrip(&protocol_version); - } - - #[test] - fn test_sem_ver(sem_ver in sem_ver_arb()) { - bytesrepr::test_serialization_roundtrip(&sem_ver); - } - - #[test] - fn test_tuple1(t in (any::(),)) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple2(t in (any::(),any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple3(t in (any::(),any::(),any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple4(t in (any::(),any::(),any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple5(t in (any::(),any::(),any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple6(t in (any::(),any::(),any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple7(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple8(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple9(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple10(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_ratio_u64(t in (any::(), 1..u64::max_value())) { - bytesrepr::test_serialization_roundtrip(&t); - } - } -} diff --git a/casper_types/src/bytesrepr/bytes.rs b/casper_types/src/bytesrepr/bytes.rs deleted file mode 100644 index 4ecf9747..00000000 --- a/casper_types/src/bytesrepr/bytes.rs +++ /dev/null @@ -1,389 +0,0 @@ -use alloc::{ - string::String, - vec::{IntoIter, Vec}, -}; -use core::{ - cmp, fmt, - iter::FromIterator, - ops::{Deref, Index, Range, RangeFrom, RangeFull, RangeTo}, - slice, -}; - -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{ - de::{Error as SerdeError, SeqAccess, Visitor}, - Deserialize, Deserializer, Serialize, Serializer, -}; - -use super::{Error, FromBytes, ToBytes}; -use crate::{checksummed_hex, CLType, CLTyped}; - -/// A newtype wrapper for bytes that has efficient serialization routines. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Default, Hash)] -pub struct Bytes(Vec); - -impl Bytes { - /// Constructs a new, empty vector of bytes. - pub fn new() -> Bytes { - Bytes::default() - } - - /// Returns reference to inner container. - #[inline] - pub fn inner_bytes(&self) -> &Vec { - &self.0 - } - - /// Extracts a slice containing the entire vector. 
- pub fn as_slice(&self) -> &[u8] { - self - } -} - -impl Deref for Bytes { - type Target = [u8]; - - fn deref(&self) -> &Self::Target { - self.0.deref() - } -} - -impl From> for Bytes { - fn from(vec: Vec) -> Self { - Self(vec) - } -} - -impl From for Vec { - fn from(bytes: Bytes) -> Self { - bytes.0 - } -} - -impl From<&[u8]> for Bytes { - fn from(bytes: &[u8]) -> Self { - Self(bytes.to_vec()) - } -} - -impl CLTyped for Bytes { - fn cl_type() -> CLType { - >::cl_type() - } -} - -impl AsRef<[u8]> for Bytes { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for Bytes { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - super::vec_u8_to_bytes(&self.0) - } - - #[inline(always)] - fn into_bytes(self) -> Result, Error> { - super::vec_u8_to_bytes(&self.0) - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - super::vec_u8_serialized_length(&self.0) - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - super::write_u8_slice(self.as_slice(), writer) - } -} - -impl FromBytes for Bytes { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), super::Error> { - let (size, remainder) = u32::from_bytes(bytes)?; - let (result, remainder) = super::safe_split_at(remainder, size as usize)?; - Ok((Bytes(result.to_vec()), remainder)) - } - - fn from_vec(stream: Vec) -> Result<(Self, Vec), Error> { - let (size, mut stream) = u32::from_vec(stream)?; - - if size as usize > stream.len() { - Err(Error::EarlyEndOfStream) - } else { - let remainder = stream.split_off(size as usize); - Ok((Bytes(stream), remainder)) - } - } -} - -impl Index for Bytes { - type Output = u8; - - fn index(&self, index: usize) -> &u8 { - let Bytes(ref dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: Range) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: RangeTo) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: RangeFrom) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index for Bytes { - type Output = [u8]; - - fn index(&self, _: RangeFull) -> &[u8] { - let Bytes(dat) = self; - &dat[..] 
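The Bytes newtype writes the whole payload at once, a u32 length prefix plus the raw bytes, and a truncated stream surfaces as EarlyEndOfStream. A minimal sketch, assuming casper-types as a dependency:

use casper_types::bytesrepr::{Bytes, Error, FromBytes, ToBytes};

fn main() {
    let data = Bytes::from(vec![1u8, 2, 3]);

    // A u32 length prefix followed by the raw bytes, with no per-element overhead.
    let serialized = data.to_bytes().expect("should serialize");
    assert_eq!(serialized, vec![3, 0, 0, 0, 1, 2, 3]);

    // Dropping the final byte leaves fewer bytes than the prefix promises.
    let truncated = &serialized[..serialized.len() - 1];
    assert_eq!(Bytes::from_bytes(truncated).unwrap_err(), Error::EarlyEndOfStream);
}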
- } -} - -impl FromIterator for Bytes { - #[inline] - fn from_iter>(iter: I) -> Bytes { - let vec = Vec::from_iter(iter); - Bytes(vec) - } -} - -impl<'a> IntoIterator for &'a Bytes { - type Item = &'a u8; - - type IntoIter = slice::Iter<'a, u8>; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter() - } -} - -impl IntoIterator for Bytes { - type Item = u8; - - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -#[cfg(feature = "datasize")] -impl datasize::DataSize for Bytes { - const IS_DYNAMIC: bool = true; - - const STATIC_HEAP_SIZE: usize = 0; - - fn estimate_heap_size(&self) -> usize { - self.0.capacity() * std::mem::size_of::() - } -} - -const RANDOM_BYTES_MAX_LENGTH: usize = 100; - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Bytes { - let len = rng.gen_range(0..RANDOM_BYTES_MAX_LENGTH); - let mut result = Vec::with_capacity(len); - for _ in 0..len { - result.push(rng.gen()); - } - result.into() - } -} - -struct BytesVisitor; - -impl<'de> Visitor<'de> for BytesVisitor { - type Value = Bytes; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("byte array") - } - - fn visit_seq(self, mut visitor: V) -> Result - where - V: SeqAccess<'de>, - { - let len = cmp::min(visitor.size_hint().unwrap_or(0), 4096); - let mut bytes = Vec::with_capacity(len); - - while let Some(b) = visitor.next_element()? { - bytes.push(b); - } - - Ok(Bytes::from(bytes)) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v)) - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v)) - } - - fn visit_str(self, v: &str) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v.as_bytes())) - } - - fn visit_string(self, v: String) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v.into_bytes())) - } -} - -impl<'de> Deserialize<'de> for Bytes { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - checksummed_hex::decode(hex_string) - .map(Bytes) - .map_err(SerdeError::custom) - } else { - let bytes = deserializer.deserialize_byte_buf(BytesVisitor)?; - Ok(bytes) - } - } -} - -impl Serialize for Bytes { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - base16::encode_lower(&self.0).serialize(serializer) - } else { - serializer.serialize_bytes(&self.0) - } - } -} - -#[cfg(test)] -mod tests { - use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}; - use alloc::vec::Vec; - - use serde_json::json; - use serde_test::{assert_tokens, Configure, Token}; - - use super::Bytes; - - const TRUTH: &[u8] = &[0xde, 0xad, 0xbe, 0xef]; - - #[test] - fn vec_u8_from_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let data_bytes = data.to_bytes().unwrap(); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH / 2]).is_err()); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH]).is_err()); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH + 2]).is_err()); - } - - #[test] - fn should_serialize_deserialize_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - bytesrepr::test_serialization_roundtrip(&data); - } - - #[test] - fn should_fail_to_serialize_deserialize_malicious_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let mut serialized = 
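In human-readable formats the same newtype round-trips through lowercase hex, with checksummed decoding on the way back in. A serde_json sketch, assuming casper-types and serde_json as dependencies:

use casper_types::bytesrepr::Bytes;

fn main() {
    let bytes = Bytes::from(vec![0xde, 0xad, 0xbe, 0xef]);

    // Human-readable serialization is plain lowercase hex.
    let json = serde_json::to_value(&bytes).expect("should serialize to JSON");
    assert_eq!(json, serde_json::json!("deadbeef"));

    // Deserializing the hex string restores the original bytes.
    let decoded: Bytes = serde_json::from_value(json).expect("should deserialize from JSON");
    assert_eq!(decoded, bytes);
}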
data.to_bytes().expect("should serialize data"); - serialized = serialized[..serialized.len() - 1].to_vec(); - let res: Result<(_, &[u8]), Error> = Bytes::from_bytes(&serialized); - assert_eq!(res.unwrap_err(), Error::EarlyEndOfStream); - } - - #[test] - fn should_serialize_deserialize_bytes_and_keep_rem() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let expected_rem: Vec = vec![6, 7, 8, 9, 10]; - let mut serialized = data.to_bytes().expect("should serialize data"); - serialized.extend(&expected_rem); - let (deserialized, rem): (Bytes, &[u8]) = - FromBytes::from_bytes(&serialized).expect("should deserialize data"); - assert_eq!(data, deserialized); - assert_eq!(&rem, &expected_rem); - } - - #[test] - fn should_ser_de_human_readable() { - let truth = vec![0xde, 0xad, 0xbe, 0xef]; - - let bytes_ser: Bytes = truth.clone().into(); - - let json_object = serde_json::to_value(bytes_ser).unwrap(); - assert_eq!(json_object, json!("deadbeef")); - - let bytes_de: Bytes = serde_json::from_value(json_object).unwrap(); - assert_eq!(bytes_de, Bytes::from(truth)); - } - - #[test] - fn should_ser_de_readable() { - let truth: Bytes = TRUTH.into(); - assert_tokens(&truth.readable(), &[Token::Str("deadbeef")]); - } - - #[test] - fn should_ser_de_compact() { - let truth: Bytes = TRUTH.into(); - assert_tokens(&truth.compact(), &[Token::Bytes(TRUTH)]); - } -} - -#[cfg(test)] -pub mod gens { - use super::Bytes; - use proptest::{ - collection::{vec, SizeRange}, - prelude::*, - }; - - pub fn bytes_arb(size: impl Into) -> impl Strategy { - vec(any::(), size).prop_map(Bytes::from) - } -} diff --git a/casper_types/src/checksummed_hex.rs b/casper_types/src/checksummed_hex.rs deleted file mode 100644 index 165acd3a..00000000 --- a/casper_types/src/checksummed_hex.rs +++ /dev/null @@ -1,241 +0,0 @@ -//! Checksummed hex encoding following an [EIP-55][1]-like scheme. -//! -//! [1]: https://eips.ethereum.org/EIPS/eip-55 - -use alloc::vec::Vec; -use core::ops::RangeInclusive; - -use base16; - -use crate::crypto; - -/// The number of input bytes, at or below which [`decode`] will checksum-decode the output. -pub const SMALL_BYTES_COUNT: usize = 75; - -const HEX_CHARS: [char; 22] = [ - '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', - 'D', 'E', 'F', -]; - -/// Takes a slice of bytes and breaks it up into a vector of *nibbles* (ie, 4-bit values) -/// represented as `u8`s. -fn bytes_to_nibbles<'a, T: 'a + AsRef<[u8]>>(input: &'a T) -> impl Iterator + 'a { - input - .as_ref() - .iter() - .flat_map(move |byte| [4, 0].iter().map(move |offset| (byte >> offset) & 0x0f)) -} - -/// Takes a slice of bytes and outputs an infinite cyclic stream of bits for those bytes. -fn bytes_to_bits_cycle(bytes: Vec) -> impl Iterator { - bytes - .into_iter() - .cycle() - .flat_map(move |byte| (0..8usize).map(move |offset| ((byte >> offset) & 0x01) == 0x01)) -} - -/// Returns the bytes encoded as hexadecimal with mixed-case based checksums following a scheme -/// similar to [EIP-55](https://eips.ethereum.org/EIPS/eip-55). 
-/// -/// Key differences: -/// - Works on any length of data, not just 20-byte addresses -/// - Uses Blake2b hashes rather than Keccak -/// - Uses hash bits rather than nibbles -fn encode_iter<'a, T: 'a + AsRef<[u8]>>(input: &'a T) -> impl Iterator + 'a { - let nibbles = bytes_to_nibbles(input); - let mut hash_bits = bytes_to_bits_cycle(crypto::blake2b(input.as_ref()).to_vec()); - nibbles.map(move |mut nibble| { - // Base 16 numbers greater than 10 are represented by the ascii characters a through f. - if nibble >= 10 && hash_bits.next().unwrap_or(true) { - // We are using nibble to index HEX_CHARS, so adding 6 to nibble gives us the index - // of the uppercase character. HEX_CHARS[10] == 'a', HEX_CHARS[16] == 'A'. - nibble += 6; - } - HEX_CHARS[nibble as usize] - }) -} - -/// Returns true if all chars in a string are uppercase or lowercase. -/// Returns false if the string is mixed case or if there are no alphabetic chars. -fn string_is_same_case>(s: T) -> bool { - const LOWER_RANGE: RangeInclusive = b'a'..=b'f'; - const UPPER_RANGE: RangeInclusive = b'A'..=b'F'; - - let mut chars = s - .as_ref() - .iter() - .filter(|c| LOWER_RANGE.contains(c) || UPPER_RANGE.contains(c)); - - match chars.next() { - Some(first) => { - let is_upper = UPPER_RANGE.contains(first); - chars.all(|c| UPPER_RANGE.contains(c) == is_upper) - } - None => { - // String has no actual characters. - true - } - } -} - -/// Decodes a mixed-case hexadecimal string, verifying that it conforms to the checksum scheme -/// similar to scheme in [EIP-55][1]. -/// -/// Key differences: -/// - Works on any length of (decoded) data up to `SMALL_BYTES_COUNT`, not just 20-byte addresses -/// - Uses Blake2b hashes rather than Keccak -/// - Uses hash bits rather than nibbles -/// -/// For backward compatibility: if the hex string is all uppercase or all lowercase, the check is -/// skipped. -/// -/// [1]: https://eips.ethereum.org/EIPS/eip-55 -pub fn decode>(input: T) -> Result, base16::DecodeError> { - let bytes = base16::decode(input.as_ref())?; - - // If the string was not small or not mixed case, don't verify the checksum. 
- if bytes.len() > SMALL_BYTES_COUNT || string_is_same_case(input.as_ref()) { - return Ok(bytes); - } - - encode_iter(&bytes) - .zip(input.as_ref().iter()) - .enumerate() - .try_for_each(|(index, (expected_case_hex_char, &input_hex_char))| { - if expected_case_hex_char as u8 == input_hex_char { - Ok(()) - } else { - Err(base16::DecodeError::InvalidByte { - index, - byte: expected_case_hex_char as u8, - }) - } - })?; - Ok(bytes) -} - -#[cfg(test)] -mod tests { - use alloc::string::String; - - use proptest::{ - collection::vec, - prelude::{any, prop_assert, prop_assert_eq}, - }; - use proptest_attr_macro::proptest; - - use super::*; - - #[test] - fn should_decode_empty_input() { - let input = String::new(); - let actual = decode(input).unwrap(); - assert!(actual.is_empty()); - } - - #[test] - fn string_is_same_case_true_when_same_case() { - let input = "aaaaaaaaaaa"; - assert!(string_is_same_case(input)); - - let input = "AAAAAAAAAAA"; - assert!(string_is_same_case(input)); - } - - #[test] - fn string_is_same_case_false_when_mixed_case() { - let input = "aAaAaAaAaAa"; - assert!(!string_is_same_case(input)); - } - - #[test] - fn string_is_same_case_no_alphabetic_chars_in_string() { - let input = "424242424242"; - assert!(string_is_same_case(input)); - } - - #[test] - fn should_checksum_decode_only_if_small() { - let input = [255; SMALL_BYTES_COUNT]; - let small_encoded: String = encode_iter(&input).collect(); - assert_eq!(input.to_vec(), decode(&small_encoded).unwrap()); - - assert!(decode("A1a2").is_err()); - - let large_encoded = format!("A1{}", small_encoded); - assert!(decode(large_encoded).is_ok()); - } - - #[proptest] - fn hex_roundtrip(input: Vec) { - prop_assert_eq!( - &input, - &decode(encode_iter(&input).collect::()).expect("Failed to decode input.") - ); - } - - proptest::proptest! { - #[test] - fn should_fail_on_invalid_checksum(input in vec(any::(), 0..75)) { - let encoded: String = encode_iter(&input).collect(); - - // Swap the case of the first letter in the checksum hex-encoded value. - let mut expected_error = None; - let mutated: String = encoded - .char_indices() - .map(|(index, mut c)| { - if expected_error.is_some() || c.is_ascii_digit() { - return c; - } - expected_error = Some(base16::DecodeError::InvalidByte { - index, - byte: c as u8, - }); - if c.is_ascii_uppercase() { - c.make_ascii_lowercase(); - } else { - c.make_ascii_uppercase(); - } - c - }) - .collect(); - - // If the encoded form is now all the same case or digits, just return. - if string_is_same_case(&mutated) { - return Ok(()); - } - - // Assert we can still decode to original input using `base16::decode`. - prop_assert_eq!( - input, - base16::decode(&mutated).expect("Failed to decode input.") - ); - - // Assert decoding using `checksummed_hex::decode` returns the expected error. 
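Since the checksum is only enforced for short, mixed-case input, uniformly cased hex always decodes, while a mixed-case string must match the Blake2b-derived casing. A sketch of that behaviour as a hypothetical in-crate test (the module is crate-internal, so the example is written beside it):

#[cfg(test)]
mod decode_examples {
    use crate::checksummed_hex::decode;

    #[test]
    fn case_handling() {
        // Uniformly cased input skips the checksum entirely.
        assert_eq!(decode("deadbeef").unwrap(), vec![0xde, 0xad, 0xbe, 0xef]);
        assert_eq!(decode("DEADBEEF").unwrap(), vec![0xde, 0xad, 0xbe, 0xef]);

        // Mixed case is treated as checksummed and rejected when the casing is wrong.
        assert!(decode("A1a2").is_err());
    }
}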
- prop_assert_eq!(expected_error.unwrap(), decode(&mutated).unwrap_err()) - } - } - - #[proptest] - fn hex_roundtrip_sanity(input: Vec) { - prop_assert!(decode(encode_iter(&input).collect::()).is_ok()) - } - - #[proptest] - fn is_same_case_uppercase(input: String) { - let input = input.to_uppercase(); - prop_assert!(string_is_same_case(input)); - } - - #[proptest] - fn is_same_case_lowercase(input: String) { - let input = input.to_lowercase(); - prop_assert!(string_is_same_case(input)); - } - - #[proptest] - fn is_not_same_case(input: String) { - let input = format!("aA{}", input); - prop_assert!(!string_is_same_case(input)); - } -} diff --git a/casper_types/src/cl_type.rs b/casper_types/src/cl_type.rs deleted file mode 100644 index b49b4ac5..00000000 --- a/casper_types/src/cl_type.rs +++ /dev/null @@ -1,779 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{ - boxed::Box, - collections::{BTreeMap, BTreeSet, VecDeque}, - string::String, - vec::Vec, -}; -use core::mem; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num_rational::Ratio; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Key, URef, U128, U256, U512, -}; - -// This must be less than 300 in order to avoid a stack overflow when deserializing. -pub(crate) const CL_TYPE_RECURSION_DEPTH: u8 = 50; - -const CL_TYPE_TAG_BOOL: u8 = 0; -const CL_TYPE_TAG_I32: u8 = 1; -const CL_TYPE_TAG_I64: u8 = 2; -const CL_TYPE_TAG_U8: u8 = 3; -const CL_TYPE_TAG_U32: u8 = 4; -const CL_TYPE_TAG_U64: u8 = 5; -const CL_TYPE_TAG_U128: u8 = 6; -const CL_TYPE_TAG_U256: u8 = 7; -const CL_TYPE_TAG_U512: u8 = 8; -const CL_TYPE_TAG_UNIT: u8 = 9; -const CL_TYPE_TAG_STRING: u8 = 10; -const CL_TYPE_TAG_KEY: u8 = 11; -const CL_TYPE_TAG_UREF: u8 = 12; -const CL_TYPE_TAG_OPTION: u8 = 13; -const CL_TYPE_TAG_LIST: u8 = 14; -const CL_TYPE_TAG_BYTE_ARRAY: u8 = 15; -const CL_TYPE_TAG_RESULT: u8 = 16; -const CL_TYPE_TAG_MAP: u8 = 17; -const CL_TYPE_TAG_TUPLE1: u8 = 18; -const CL_TYPE_TAG_TUPLE2: u8 = 19; -const CL_TYPE_TAG_TUPLE3: u8 = 20; -const CL_TYPE_TAG_ANY: u8 = 21; -const CL_TYPE_TAG_PUBLIC_KEY: u8 = 22; - -/// Casper types, i.e. types which can be stored and manipulated by smart contracts. -/// -/// Provides a description of the underlying data type of a [`CLValue`](crate::CLValue). -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum CLType { - /// `bool` primitive. - Bool, - /// `i32` primitive. - I32, - /// `i64` primitive. - I64, - /// `u8` primitive. - U8, - /// `u32` primitive. - U32, - /// `u64` primitive. - U64, - /// [`U128`] large unsigned integer type. - U128, - /// [`U256`] large unsigned integer type. - U256, - /// [`U512`] large unsigned integer type. - U512, - /// `()` primitive. - Unit, - /// `String` primitive. - String, - /// [`Key`] system type. - Key, - /// [`URef`] system type. - URef, - /// [`PublicKey`](crate::PublicKey) system type. - PublicKey, - /// `Option` of a `CLType`. - #[cfg_attr(feature = "datasize", data_size(skip))] - Option(Box), - /// Variable-length list of a single `CLType` (comparable to a `Vec`). - #[cfg_attr(feature = "datasize", data_size(skip))] - List(Box), - /// Fixed-length list of a single `CLType` (comparable to a Rust array). 
- ByteArray(u32), - /// `Result` with `Ok` and `Err` variants of `CLType`s. - #[allow(missing_docs)] // generated docs are explicit enough. - #[cfg_attr(feature = "datasize", data_size(skip))] - Result { ok: Box, err: Box }, - /// Map with keys of a single `CLType` and values of a single `CLType`. - #[allow(missing_docs)] // generated docs are explicit enough. - #[cfg_attr(feature = "datasize", data_size(skip))] - Map { - key: Box, - value: Box, - }, - /// 1-ary tuple of a `CLType`. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple1([Box; 1]), - /// 2-ary tuple of `CLType`s. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple2([Box; 2]), - /// 3-ary tuple of `CLType`s. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple3([Box; 3]), - /// Unspecified type. - Any, -} - -impl CLType { - /// The `len()` of the `Vec` resulting from `self.to_bytes()`. - pub fn serialized_length(&self) -> usize { - mem::size_of::() - + match self { - CLType::Bool - | CLType::I32 - | CLType::I64 - | CLType::U8 - | CLType::U32 - | CLType::U64 - | CLType::U128 - | CLType::U256 - | CLType::U512 - | CLType::Unit - | CLType::String - | CLType::Key - | CLType::URef - | CLType::PublicKey - | CLType::Any => 0, - CLType::Option(cl_type) | CLType::List(cl_type) => cl_type.serialized_length(), - CLType::ByteArray(list_len) => list_len.serialized_length(), - CLType::Result { ok, err } => ok.serialized_length() + err.serialized_length(), - CLType::Map { key, value } => key.serialized_length() + value.serialized_length(), - CLType::Tuple1(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - CLType::Tuple2(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - CLType::Tuple3(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - } - } - - /// Returns `true` if the [`CLType`] is [`Option`]. - pub fn is_option(&self) -> bool { - matches!(self, Self::Option(..)) - } -} - -/// Returns the `CLType` describing a "named key" on the system, i.e. a `(String, Key)`. 
-pub fn named_key_type() -> CLType { - CLType::Tuple2([Box::new(CLType::String), Box::new(CLType::Key)]) -} - -impl CLType { - pub(crate) fn append_bytes(&self, stream: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - CLType::Bool => stream.push(CL_TYPE_TAG_BOOL), - CLType::I32 => stream.push(CL_TYPE_TAG_I32), - CLType::I64 => stream.push(CL_TYPE_TAG_I64), - CLType::U8 => stream.push(CL_TYPE_TAG_U8), - CLType::U32 => stream.push(CL_TYPE_TAG_U32), - CLType::U64 => stream.push(CL_TYPE_TAG_U64), - CLType::U128 => stream.push(CL_TYPE_TAG_U128), - CLType::U256 => stream.push(CL_TYPE_TAG_U256), - CLType::U512 => stream.push(CL_TYPE_TAG_U512), - CLType::Unit => stream.push(CL_TYPE_TAG_UNIT), - CLType::String => stream.push(CL_TYPE_TAG_STRING), - CLType::Key => stream.push(CL_TYPE_TAG_KEY), - CLType::URef => stream.push(CL_TYPE_TAG_UREF), - CLType::PublicKey => stream.push(CL_TYPE_TAG_PUBLIC_KEY), - CLType::Option(cl_type) => { - stream.push(CL_TYPE_TAG_OPTION); - cl_type.append_bytes(stream)?; - } - CLType::List(cl_type) => { - stream.push(CL_TYPE_TAG_LIST); - cl_type.append_bytes(stream)?; - } - CLType::ByteArray(len) => { - stream.push(CL_TYPE_TAG_BYTE_ARRAY); - stream.append(&mut len.to_bytes()?); - } - CLType::Result { ok, err } => { - stream.push(CL_TYPE_TAG_RESULT); - ok.append_bytes(stream)?; - err.append_bytes(stream)?; - } - CLType::Map { key, value } => { - stream.push(CL_TYPE_TAG_MAP); - key.append_bytes(stream)?; - value.append_bytes(stream)?; - } - CLType::Tuple1(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE1, cl_type_array, stream)? - } - CLType::Tuple2(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE2, cl_type_array, stream)? - } - CLType::Tuple3(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE3, cl_type_array, stream)? 
- } - CLType::Any => stream.push(CL_TYPE_TAG_ANY), - } - Ok(()) - } -} - -impl FromBytes for CLType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - depth_limited_from_bytes(0, bytes) - } -} - -fn depth_limited_from_bytes(depth: u8, bytes: &[u8]) -> Result<(CLType, &[u8]), bytesrepr::Error> { - if depth >= CL_TYPE_RECURSION_DEPTH { - return Err(bytesrepr::Error::ExceededRecursionDepth); - } - let depth = depth + 1; - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - CL_TYPE_TAG_BOOL => Ok((CLType::Bool, remainder)), - CL_TYPE_TAG_I32 => Ok((CLType::I32, remainder)), - CL_TYPE_TAG_I64 => Ok((CLType::I64, remainder)), - CL_TYPE_TAG_U8 => Ok((CLType::U8, remainder)), - CL_TYPE_TAG_U32 => Ok((CLType::U32, remainder)), - CL_TYPE_TAG_U64 => Ok((CLType::U64, remainder)), - CL_TYPE_TAG_U128 => Ok((CLType::U128, remainder)), - CL_TYPE_TAG_U256 => Ok((CLType::U256, remainder)), - CL_TYPE_TAG_U512 => Ok((CLType::U512, remainder)), - CL_TYPE_TAG_UNIT => Ok((CLType::Unit, remainder)), - CL_TYPE_TAG_STRING => Ok((CLType::String, remainder)), - CL_TYPE_TAG_KEY => Ok((CLType::Key, remainder)), - CL_TYPE_TAG_UREF => Ok((CLType::URef, remainder)), - CL_TYPE_TAG_PUBLIC_KEY => Ok((CLType::PublicKey, remainder)), - CL_TYPE_TAG_OPTION => { - let (inner_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::Option(Box::new(inner_type)); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_LIST => { - let (inner_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::List(Box::new(inner_type)); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_BYTE_ARRAY => { - let (len, remainder) = u32::from_bytes(remainder)?; - let cl_type = CLType::ByteArray(len); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_RESULT => { - let (ok_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let (err_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::Result { - ok: Box::new(ok_type), - err: Box::new(err_type), - }; - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_MAP => { - let (key_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let (value_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::Map { - key: Box::new(key_type), - value: Box::new(value_type), - }; - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE1 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 1, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 1 - // element - let cl_type = CLType::Tuple1([inner_types.pop_front().unwrap()]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE2 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 2, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 2 - // elements - let cl_type = CLType::Tuple2([ - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - ]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE3 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 3, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 3 - // elements - let cl_type = CLType::Tuple3([ - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - ]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_ANY => Ok((CLType::Any, remainder)), - _ => Err(bytesrepr::Error::Formatting), - } -} - -fn serialize_cl_tuple_type<'a, T: IntoIterator>>( - 
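Each CLType is encoded as its tag byte followed by any nested type descriptions, so an Option of U64 is just two bytes: the OPTION tag, then the U64 tag. A small decoding sketch, assuming casper-types as a dependency:

use casper_types::{bytesrepr::FromBytes, CLType};

fn main() {
    // Tag 13 is Option and tag 5 is U64, per the CL_TYPE_TAG_* constants above.
    let (cl_type, remainder) = CLType::from_bytes(&[13, 5]).expect("should parse");
    assert_eq!(cl_type, CLType::Option(Box::new(CLType::U64)));
    assert!(remainder.is_empty());
    assert_eq!(cl_type.serialized_length(), 2);
}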
tag: u8, - cl_type_array: T, - stream: &mut Vec, -) -> Result<(), bytesrepr::Error> { - stream.push(tag); - for cl_type in cl_type_array { - cl_type.append_bytes(stream)?; - } - Ok(()) -} - -fn parse_cl_tuple_types( - depth: u8, - count: usize, - mut bytes: &[u8], -) -> Result<(VecDeque>, &[u8]), bytesrepr::Error> { - let mut cl_types = VecDeque::with_capacity(count); - for _ in 0..count { - let (cl_type, remainder) = depth_limited_from_bytes(depth, bytes)?; - cl_types.push_back(Box::new(cl_type)); - bytes = remainder; - } - - Ok((cl_types, bytes)) -} - -fn serialized_length_of_cl_tuple_type<'a, T: IntoIterator>>( - cl_type_array: T, -) -> usize { - cl_type_array - .into_iter() - .map(|cl_type| cl_type.serialized_length()) - .sum() -} - -/// A type which can be described as a [`CLType`]. -pub trait CLTyped { - /// The `CLType` of `Self`. - fn cl_type() -> CLType; -} - -impl CLTyped for bool { - fn cl_type() -> CLType { - CLType::Bool - } -} - -impl CLTyped for i32 { - fn cl_type() -> CLType { - CLType::I32 - } -} - -impl CLTyped for i64 { - fn cl_type() -> CLType { - CLType::I64 - } -} - -impl CLTyped for u8 { - fn cl_type() -> CLType { - CLType::U8 - } -} - -impl CLTyped for u32 { - fn cl_type() -> CLType { - CLType::U32 - } -} - -impl CLTyped for u64 { - fn cl_type() -> CLType { - CLType::U64 - } -} - -impl CLTyped for U128 { - fn cl_type() -> CLType { - CLType::U128 - } -} - -impl CLTyped for U256 { - fn cl_type() -> CLType { - CLType::U256 - } -} - -impl CLTyped for U512 { - fn cl_type() -> CLType { - CLType::U512 - } -} - -impl CLTyped for () { - fn cl_type() -> CLType { - CLType::Unit - } -} - -impl CLTyped for String { - fn cl_type() -> CLType { - CLType::String - } -} - -impl CLTyped for &str { - fn cl_type() -> CLType { - CLType::String - } -} - -impl CLTyped for Key { - fn cl_type() -> CLType { - CLType::Key - } -} - -impl CLTyped for URef { - fn cl_type() -> CLType { - CLType::URef - } -} - -impl CLTyped for Option { - fn cl_type() -> CLType { - CLType::Option(Box::new(T::cl_type())) - } -} - -impl CLTyped for Vec { - fn cl_type() -> CLType { - CLType::List(Box::new(T::cl_type())) - } -} - -impl CLTyped for BTreeSet { - fn cl_type() -> CLType { - CLType::List(Box::new(T::cl_type())) - } -} - -impl CLTyped for &T { - fn cl_type() -> CLType { - T::cl_type() - } -} - -impl CLTyped for [u8; COUNT] { - fn cl_type() -> CLType { - CLType::ByteArray(COUNT as u32) - } -} - -impl CLTyped for Result { - fn cl_type() -> CLType { - let ok = Box::new(T::cl_type()); - let err = Box::new(E::cl_type()); - CLType::Result { ok, err } - } -} - -impl CLTyped for BTreeMap { - fn cl_type() -> CLType { - let key = Box::new(K::cl_type()); - let value = Box::new(V::cl_type()); - CLType::Map { key, value } - } -} - -impl CLTyped for (T1,) { - fn cl_type() -> CLType { - CLType::Tuple1([Box::new(T1::cl_type())]) - } -} - -impl CLTyped for (T1, T2) { - fn cl_type() -> CLType { - CLType::Tuple2([Box::new(T1::cl_type()), Box::new(T2::cl_type())]) - } -} - -impl CLTyped for (T1, T2, T3) { - fn cl_type() -> CLType { - CLType::Tuple3([ - Box::new(T1::cl_type()), - Box::new(T2::cl_type()), - Box::new(T3::cl_type()), - ]) - } -} - -impl CLTyped for Ratio { - fn cl_type() -> CLType { - <(T, T)>::cl_type() - } -} - -#[cfg(test)] -mod tests { - use std::{fmt::Debug, iter, string::ToString}; - - use super::*; - use crate::{ - bytesrepr::{FromBytes, ToBytes}, - AccessRights, CLValue, - }; - - fn round_trip(value: &T) { - let cl_value = CLValue::from_t(value.clone()).unwrap(); - - let serialized_cl_value = 
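The CLTyped impls give every supported Rust type a CLType description, for instance Vec maps to List and BTreeMap to Map. A brief sketch, assuming casper-types as a dependency:

use std::collections::BTreeMap;

use casper_types::{CLType, CLTyped};

fn main() {
    assert_eq!(<Option<u64>>::cl_type(), CLType::Option(Box::new(CLType::U64)));
    assert_eq!(<Vec<String>>::cl_type(), CLType::List(Box::new(CLType::String)));
    assert_eq!(<[u8; 32]>::cl_type(), CLType::ByteArray(32));
    assert_eq!(
        <BTreeMap<String, u64>>::cl_type(),
        CLType::Map {
            key: Box::new(CLType::String),
            value: Box::new(CLType::U64),
        }
    );
}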
cl_value.to_bytes().unwrap(); - assert_eq!(serialized_cl_value.len(), cl_value.serialized_length()); - let parsed_cl_value: CLValue = bytesrepr::deserialize(serialized_cl_value).unwrap(); - assert_eq!(cl_value, parsed_cl_value); - - let parsed_value = CLValue::into_t(cl_value).unwrap(); - assert_eq!(*value, parsed_value); - } - - #[test] - fn bool_should_work() { - round_trip(&true); - round_trip(&false); - } - - #[test] - fn u8_should_work() { - round_trip(&1u8); - } - - #[test] - fn u32_should_work() { - round_trip(&1u32); - } - - #[test] - fn i32_should_work() { - round_trip(&-1i32); - } - - #[test] - fn u64_should_work() { - round_trip(&1u64); - } - - #[test] - fn i64_should_work() { - round_trip(&-1i64); - } - - #[test] - fn u128_should_work() { - round_trip(&U128::one()); - } - - #[test] - fn u256_should_work() { - round_trip(&U256::one()); - } - - #[test] - fn u512_should_work() { - round_trip(&U512::one()); - } - - #[test] - fn unit_should_work() { - round_trip(&()); - } - - #[test] - fn string_should_work() { - round_trip(&String::from("abc")); - } - - #[test] - fn key_should_work() { - let key = Key::URef(URef::new([0u8; 32], AccessRights::READ_ADD_WRITE)); - round_trip(&key); - } - - #[test] - fn uref_should_work() { - let uref = URef::new([0u8; 32], AccessRights::READ_ADD_WRITE); - round_trip(&uref); - } - - #[test] - fn option_of_cl_type_should_work() { - let x: Option = Some(-1); - let y: Option = None; - - round_trip(&x); - round_trip(&y); - } - - #[test] - fn vec_of_cl_type_should_work() { - let vec = vec![String::from("a"), String::from("b")]; - round_trip(&vec); - } - - #[test] - #[allow(clippy::cognitive_complexity)] - fn small_array_of_u8_should_work() { - macro_rules! test_small_array { - ($($N:literal)+) => { - $( - let mut array: [u8; $N] = Default::default(); - for i in 0..$N { - array[i] = i as u8; - } - round_trip(&array); - )+ - } - } - - test_small_array! { - 1 2 3 4 5 6 7 8 9 - 10 11 12 13 14 15 16 17 18 19 - 20 21 22 23 24 25 26 27 28 29 - 30 31 32 - } - } - - #[test] - fn large_array_of_cl_type_should_work() { - macro_rules! test_large_array { - ($($N:literal)+) => { - $( - let array = { - let mut tmp = [0u8; $N]; - for i in 0..$N { - tmp[i] = i as u8; - } - tmp - }; - - let cl_value = CLValue::from_t(array.clone()).unwrap(); - - let serialized_cl_value = cl_value.to_bytes().unwrap(); - let parsed_cl_value: CLValue = bytesrepr::deserialize(serialized_cl_value).unwrap(); - assert_eq!(cl_value, parsed_cl_value); - - let parsed_value: [u8; $N] = CLValue::into_t(cl_value).unwrap(); - for i in 0..$N { - assert_eq!(array[i], parsed_value[i]); - } - )+ - } - } - - test_large_array! 
{ 64 128 256 512 } - } - - #[test] - fn result_of_cl_type_should_work() { - let x: Result<(), String> = Ok(()); - let y: Result<(), String> = Err(String::from("Hello, world!")); - - round_trip(&x); - round_trip(&y); - } - - #[test] - fn map_of_cl_type_should_work() { - let mut map: BTreeMap = BTreeMap::new(); - map.insert(String::from("abc"), 1); - map.insert(String::from("xyz"), 2); - - round_trip(&map); - } - - #[test] - fn tuple_1_should_work() { - let x = (-1i32,); - - round_trip(&x); - } - - #[test] - fn tuple_2_should_work() { - let x = (-1i32, String::from("a")); - - round_trip(&x); - } - - #[test] - fn tuple_3_should_work() { - let x = (-1i32, 1u32, String::from("a")); - - round_trip(&x); - } - - #[test] - fn parsing_nested_tuple_1_cltype_should_not_stack_overflow() { - // The bytesrepr representation of the CLType for a - // nested (((...((),),...),),) looks like: - // [18, 18, 18, ..., 9] - - for i in 1..1000 { - let bytes = iter::repeat(CL_TYPE_TAG_TUPLE1) - .take(i) - .chain(iter::once(CL_TYPE_TAG_UNIT)) - .collect(); - match bytesrepr::deserialize(bytes) { - Ok(parsed_cltype) => assert!(matches!(parsed_cltype, CLType::Tuple1(_))), - Err(error) => assert_eq!(error, bytesrepr::Error::ExceededRecursionDepth), - } - } - } - - #[test] - fn parsing_nested_tuple_1_value_should_not_stack_overflow() { - // The bytesrepr representation of the CLValue for a - // nested (((...((),),...),),) looks like: - // [0, 0, 0, 0, 18, 18, 18, ..., 18, 9] - - for i in 1..1000 { - let bytes = iter::repeat(0) - .take(4) - .chain(iter::repeat(CL_TYPE_TAG_TUPLE1).take(i)) - .chain(iter::once(CL_TYPE_TAG_UNIT)) - .collect(); - match bytesrepr::deserialize::(bytes) { - Ok(parsed_clvalue) => { - assert!(matches!(parsed_clvalue.cl_type(), CLType::Tuple1(_))) - } - Err(error) => assert_eq!(error, bytesrepr::Error::ExceededRecursionDepth), - } - } - } - - #[test] - fn any_should_work() { - #[derive(PartialEq, Debug, Clone)] - struct Any(String); - - impl CLTyped for Any { - fn cl_type() -> CLType { - CLType::Any - } - } - - impl ToBytes for Any { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - } - - impl FromBytes for Any { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (inner, remainder) = String::from_bytes(bytes)?; - Ok((Any(inner), remainder)) - } - } - - let any = Any("Any test".to_string()); - round_trip(&any); - } - - #[test] - fn should_have_cltype_of_ref_to_cltyped() { - assert_eq!(>::cl_type(), >::cl_type()) - } -} diff --git a/casper_types/src/cl_value.rs b/casper_types/src/cl_value.rs deleted file mode 100644 index 1dc1bee5..00000000 --- a/casper_types/src/cl_value.rs +++ /dev/null @@ -1,1197 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; -use serde_json::Value; - -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}, - checksummed_hex, CLType, CLTyped, -}; - -mod jsonrepr; - -/// Error while converting a [`CLValue`] into a given type. 
-#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct CLTypeMismatch { - /// The [`CLType`] into which the `CLValue` was being converted. - pub expected: CLType, - /// The actual underlying [`CLType`] of this `CLValue`, i.e. the type from which it was - /// constructed. - pub found: CLType, -} - -impl Display for CLTypeMismatch { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!( - f, - "Expected {:?} but found {:?}.", - self.expected, self.found - ) - } -} - -/// Error relating to [`CLValue`] operations. -#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum CLValueError { - /// An error while serializing or deserializing the underlying data. - Serialization(bytesrepr::Error), - /// A type mismatch while trying to convert a [`CLValue`] into a given type. - Type(CLTypeMismatch), -} - -impl From for CLValueError { - fn from(error: bytesrepr::Error) -> Self { - CLValueError::Serialization(error) - } -} - -impl Display for CLValueError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - CLValueError::Serialization(error) => write!(formatter, "CLValue error: {}", error), - CLValueError::Type(error) => write!(formatter, "Type mismatch: {}", error), - } - } -} - -/// A Casper value, i.e. a value which can be stored and manipulated by smart contracts. -/// -/// It holds the underlying data as a type-erased, serialized `Vec` and also holds the -/// [`CLType`] of the underlying data as a separate member. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct CLValue { - cl_type: CLType, - bytes: Bytes, -} - -impl CLValue { - /// Constructs a `CLValue` from `t`. - pub fn from_t(t: T) -> Result { - let bytes = t.into_bytes()?; - - Ok(CLValue { - cl_type: T::cl_type(), - bytes: bytes.into(), - }) - } - - /// Consumes and converts `self` back into its underlying type. - pub fn into_t(self) -> Result { - let expected = T::cl_type(); - - if self.cl_type == expected { - Ok(bytesrepr::deserialize_from_slice(&self.bytes)?) - } else { - Err(CLValueError::Type(CLTypeMismatch { - expected, - found: self.cl_type, - })) - } - } - - /// A convenience method to create CLValue for a unit. - pub fn unit() -> Self { - CLValue::from_components(CLType::Unit, Vec::new()) - } - - // This is only required in order to implement `TryFrom for CLValue` (i.e. the - // conversion from the Protobuf `CLValue`) in a separate module to this one. - #[doc(hidden)] - pub fn from_components(cl_type: CLType, bytes: Vec) -> Self { - Self { - cl_type, - bytes: bytes.into(), - } - } - - // This is only required in order to implement `From for state::CLValue` (i.e. the - // conversion to the Protobuf `CLValue`) in a separate module to this one. - #[doc(hidden)] - pub fn destructure(self) -> (CLType, Bytes) { - (self.cl_type, self.bytes) - } - - /// The [`CLType`] of the underlying data. - pub fn cl_type(&self) -> &CLType { - &self.cl_type - } - - /// Returns a reference to the serialized form of the underlying value held in this `CLValue`. - pub fn inner_bytes(&self) -> &Vec { - self.bytes.inner_bytes() - } - - /// Returns the length of the `Vec` yielded after calling `self.to_bytes()`. - /// - /// Note, this method doesn't actually serialize `self`, and hence is relatively cheap. 
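`CLValue::from_t` erases the Rust type into a `(CLType, bytes)` pair, and `into_t` re-checks the stored `CLType` before deserializing, failing with a `CLTypeMismatch` on disagreement. A small usage sketch (crate-root re-exports of these types are assumed):

    use casper_types::{CLType, CLValue, CLValueError};

    fn main() {
        // Wrap a u64 in a type-erased CLValue.
        let cl_value = CLValue::from_t(42u64).unwrap();
        assert_eq!(*cl_value.cl_type(), CLType::U64);

        // Converting back to the original type succeeds.
        let n: u64 = cl_value.clone().into_t().unwrap();
        assert_eq!(n, 42);

        // Converting to any other type reports the expected vs. found CLTypes.
        match cl_value.into_t::<String>() {
            Err(CLValueError::Type(mismatch)) => {
                assert_eq!(mismatch.expected, CLType::String);
                assert_eq!(mismatch.found, CLType::U64);
            }
            other => panic!("expected a type mismatch, got {:?}", other),
        }
    }
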
- pub fn serialized_length(&self) -> usize { - self.cl_type.serialized_length() + U32_SERIALIZED_LENGTH + self.bytes.len() - } -} - -impl ToBytes for CLValue { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.clone().into_bytes() - } - - fn into_bytes(self) -> Result, bytesrepr::Error> { - let mut result = self.bytes.into_bytes()?; - self.cl_type.append_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.bytes.serialized_length() + self.cl_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.bytes.write_bytes(writer)?; - self.cl_type.append_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for CLValue { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, remainder) = FromBytes::from_bytes(bytes)?; - let (cl_type, remainder) = FromBytes::from_bytes(remainder)?; - let cl_value = CLValue { cl_type, bytes }; - Ok((cl_value, remainder)) - } -} - -/// We need to implement `JsonSchema` for `CLValue` as though it is a `CLValueJson`. -#[cfg(feature = "json-schema")] -impl JsonSchema for CLValue { - fn schema_name() -> String { - "CLValue".to_string() - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - ::json_schema(gen) - } -} - -/// A Casper value, i.e. a value which can be stored and manipulated by smart contracts. -/// -/// It holds the underlying data as a type-erased, serialized `Vec` and also holds the CLType of -/// the underlying data as a separate member. -/// -/// The `parsed` field, representing the original value, is a convenience only available when a -/// CLValue is encoded to JSON, and can always be set to null if preferred. -#[derive(Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "CLValue"))] -struct CLValueJson { - cl_type: CLType, - bytes: String, - parsed: Option, -} - -impl Serialize for CLValue { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - CLValueJson { - cl_type: self.cl_type.clone(), - bytes: base16::encode_lower(&self.bytes), - parsed: jsonrepr::cl_value_to_json(self), - } - .serialize(serializer) - } else { - (&self.cl_type, &self.bytes).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for CLValue { - fn deserialize>(deserializer: D) -> Result { - let (cl_type, bytes) = if deserializer.is_human_readable() { - let json = CLValueJson::deserialize(deserializer)?; - ( - json.cl_type.clone(), - checksummed_hex::decode(&json.bytes).map_err(D::Error::custom)?, - ) - } else { - <(CLType, Vec)>::deserialize(deserializer)? 
- }; - Ok(CLValue { - cl_type, - bytes: bytes.into(), - }) - } -} - -#[cfg(test)] -mod tests { - use alloc::string::ToString; - - #[cfg(feature = "json-schema")] - use schemars::schema_for; - - use super::*; - use crate::{ - account::{AccountHash, ACCOUNT_HASH_LENGTH}, - key::KEY_HASH_LENGTH, - AccessRights, DeployHash, Key, PublicKey, TransferAddr, URef, DEPLOY_HASH_LENGTH, - TRANSFER_ADDR_LENGTH, U128, U256, U512, UREF_ADDR_LENGTH, - }; - - #[cfg(feature = "json-schema")] - #[test] - fn json_schema() { - let json_clvalue_schema = schema_for!(CLValueJson); - let clvalue_schema = schema_for!(CLValue); - assert_eq!(json_clvalue_schema, clvalue_schema); - } - - #[test] - fn serde_roundtrip() { - let cl_value = CLValue::from_t(true).unwrap(); - let serialized = bincode::serialize(&cl_value).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(cl_value, decoded); - } - - #[test] - fn json_roundtrip() { - let cl_value = CLValue::from_t(true).unwrap(); - let json_string = serde_json::to_string_pretty(&cl_value).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(cl_value, decoded); - } - - fn check_to_json(value: T, expected: &str) { - let cl_value = CLValue::from_t(value).unwrap(); - let cl_value_as_json = serde_json::to_string(&cl_value).unwrap(); - // Remove the `serialized_bytes` field: - // Split the string at `,"serialized_bytes":`. - let pattern = r#","bytes":""#; - let start_index = cl_value_as_json.find(pattern).unwrap(); - let (start, end) = cl_value_as_json.split_at(start_index); - // Find the end of the value of the `bytes` field, and split there. - let mut json_without_serialize_bytes = start.to_string(); - for (index, char) in end.char_indices().skip(pattern.len()) { - if char == '"' { - let (_to_remove, to_keep) = end.split_at(index + 1); - json_without_serialize_bytes.push_str(to_keep); - break; - } - } - assert_eq!(json_without_serialize_bytes, expected); - } - - mod simple_types { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json(true, r#"{"cl_type":"Bool","parsed":true}"#); - check_to_json(false, r#"{"cl_type":"Bool","parsed":false}"#); - } - - #[test] - fn i32_cl_value_should_encode_to_json() { - check_to_json( - i32::min_value(), - r#"{"cl_type":"I32","parsed":-2147483648}"#, - ); - check_to_json(0_i32, r#"{"cl_type":"I32","parsed":0}"#); - check_to_json(i32::max_value(), r#"{"cl_type":"I32","parsed":2147483647}"#); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - i64::min_value(), - r#"{"cl_type":"I64","parsed":-9223372036854775808}"#, - ); - check_to_json(0_i64, r#"{"cl_type":"I64","parsed":0}"#); - check_to_json( - i64::max_value(), - r#"{"cl_type":"I64","parsed":9223372036854775807}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json(0_u8, r#"{"cl_type":"U8","parsed":0}"#); - check_to_json(u8::max_value(), r#"{"cl_type":"U8","parsed":255}"#); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json(0_u32, r#"{"cl_type":"U32","parsed":0}"#); - check_to_json(u32::max_value(), r#"{"cl_type":"U32","parsed":4294967295}"#); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json(0_u64, r#"{"cl_type":"U64","parsed":0}"#); - check_to_json( - u64::max_value(), - r#"{"cl_type":"U64","parsed":18446744073709551615}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json(U128::zero(), r#"{"cl_type":"U128","parsed":"0"}"#); - 
check_to_json( - U128::max_value(), - r#"{"cl_type":"U128","parsed":"340282366920938463463374607431768211455"}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json(U256::zero(), r#"{"cl_type":"U256","parsed":"0"}"#); - check_to_json( - U256::max_value(), - r#"{"cl_type":"U256","parsed":"115792089237316195423570985008687907853269984665640564039457584007913129639935"}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json(U512::zero(), r#"{"cl_type":"U512","parsed":"0"}"#); - check_to_json( - U512::max_value(), - r#"{"cl_type":"U512","parsed":"13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084095"}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json((), r#"{"cl_type":"Unit","parsed":null}"#); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - check_to_json(String::new(), r#"{"cl_type":"String","parsed":""}"#); - check_to_json( - "test string".to_string(), - r#"{"cl_type":"String","parsed":"test string"}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key_account = Key::Account(AccountHash::new([1; ACCOUNT_HASH_LENGTH])); - check_to_json( - key_account, - r#"{"cl_type":"Key","parsed":{"Account":"account-hash-0101010101010101010101010101010101010101010101010101010101010101"}}"#, - ); - - let key_hash = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - key_hash, - r#"{"cl_type":"Key","parsed":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - - let key_uref = Key::URef(URef::new([3; UREF_ADDR_LENGTH], AccessRights::READ)); - check_to_json( - key_uref, - r#"{"cl_type":"Key","parsed":{"URef":"uref-0303030303030303030303030303030303030303030303030303030303030303-001"}}"#, - ); - - let key_transfer = Key::Transfer(TransferAddr::new([4; TRANSFER_ADDR_LENGTH])); - check_to_json( - key_transfer, - r#"{"cl_type":"Key","parsed":{"Transfer":"transfer-0404040404040404040404040404040404040404040404040404040404040404"}}"#, - ); - - let key_deploy_info = Key::DeployInfo(DeployHash::new([5; DEPLOY_HASH_LENGTH])); - check_to_json( - key_deploy_info, - r#"{"cl_type":"Key","parsed":{"DeployInfo":"deploy-0505050505050505050505050505050505050505050505050505050505050505"}}"#, - ); - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - uref, - r#"{"cl_type":"URef","parsed":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}"#, - ); - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - check_to_json( - PublicKey::from( - &SecretKey::ed25519_from_bytes([7; SecretKey::ED25519_LENGTH]).unwrap(), - ), - r#"{"cl_type":"PublicKey","parsed":"01ea4a6c63e29c520abef5507b132ec5f9954776aebebe7b92421eea691446d22c"}"#, - ); - check_to_json( - PublicKey::from( - &SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(), - ), - r#"{"cl_type":"PublicKey","parsed":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}"#, - ); - } - } - - mod option { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json(Some(true), r#"{"cl_type":{"Option":"Bool"},"parsed":true}"#); - check_to_json( - Some(false), - r#"{"cl_type":{"Option":"Bool"},"parsed":false}"#, - ); - check_to_json( - Option::::None, - 
r#"{"cl_type":{"Option":"Bool"},"parsed":null}"#, - ); - } - - #[test] - fn i32_cl_value_should_encode_to_json() { - check_to_json( - Some(i32::min_value()), - r#"{"cl_type":{"Option":"I32"},"parsed":-2147483648}"#, - ); - check_to_json(Some(0_i32), r#"{"cl_type":{"Option":"I32"},"parsed":0}"#); - check_to_json( - Some(i32::max_value()), - r#"{"cl_type":{"Option":"I32"},"parsed":2147483647}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"I32"},"parsed":null}"#, - ); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - Some(i64::min_value()), - r#"{"cl_type":{"Option":"I64"},"parsed":-9223372036854775808}"#, - ); - check_to_json(Some(0_i64), r#"{"cl_type":{"Option":"I64"},"parsed":0}"#); - check_to_json( - Some(i64::max_value()), - r#"{"cl_type":{"Option":"I64"},"parsed":9223372036854775807}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"I64"},"parsed":null}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json(Some(0_u8), r#"{"cl_type":{"Option":"U8"},"parsed":0}"#); - check_to_json( - Some(u8::max_value()), - r#"{"cl_type":{"Option":"U8"},"parsed":255}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U8"},"parsed":null}"#, - ); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json(Some(0_u32), r#"{"cl_type":{"Option":"U32"},"parsed":0}"#); - check_to_json( - Some(u32::max_value()), - r#"{"cl_type":{"Option":"U32"},"parsed":4294967295}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U32"},"parsed":null}"#, - ); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json(Some(0_u64), r#"{"cl_type":{"Option":"U64"},"parsed":0}"#); - check_to_json( - Some(u64::max_value()), - r#"{"cl_type":{"Option":"U64"},"parsed":18446744073709551615}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U64"},"parsed":null}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json( - Some(U128::zero()), - r#"{"cl_type":{"Option":"U128"},"parsed":"0"}"#, - ); - check_to_json( - Some(U128::max_value()), - r#"{"cl_type":{"Option":"U128"},"parsed":"340282366920938463463374607431768211455"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U128"},"parsed":null}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json( - Some(U256::zero()), - r#"{"cl_type":{"Option":"U256"},"parsed":"0"}"#, - ); - check_to_json( - Some(U256::max_value()), - r#"{"cl_type":{"Option":"U256"},"parsed":"115792089237316195423570985008687907853269984665640564039457584007913129639935"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U256"},"parsed":null}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json( - Some(U512::zero()), - r#"{"cl_type":{"Option":"U512"},"parsed":"0"}"#, - ); - check_to_json( - Some(U512::max_value()), - r#"{"cl_type":{"Option":"U512"},"parsed":"13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084095"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U512"},"parsed":null}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json(Some(()), r#"{"cl_type":{"Option":"Unit"},"parsed":null}"#); - check_to_json( - Option::<()>::None, - r#"{"cl_type":{"Option":"Unit"},"parsed":null}"#, - ); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - 
check_to_json( - Some(String::new()), - r#"{"cl_type":{"Option":"String"},"parsed":""}"#, - ); - check_to_json( - Some("test string".to_string()), - r#"{"cl_type":{"Option":"String"},"parsed":"test string"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"String"},"parsed":null}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key_account = Key::Account(AccountHash::new([1; ACCOUNT_HASH_LENGTH])); - check_to_json( - Some(key_account), - r#"{"cl_type":{"Option":"Key"},"parsed":{"Account":"account-hash-0101010101010101010101010101010101010101010101010101010101010101"}}"#, - ); - - let key_hash = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - Some(key_hash), - r#"{"cl_type":{"Option":"Key"},"parsed":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - - let key_uref = Key::URef(URef::new([3; UREF_ADDR_LENGTH], AccessRights::READ)); - check_to_json( - Some(key_uref), - r#"{"cl_type":{"Option":"Key"},"parsed":{"URef":"uref-0303030303030303030303030303030303030303030303030303030303030303-001"}}"#, - ); - - let key_transfer = Key::Transfer(TransferAddr::new([4; TRANSFER_ADDR_LENGTH])); - check_to_json( - Some(key_transfer), - r#"{"cl_type":{"Option":"Key"},"parsed":{"Transfer":"transfer-0404040404040404040404040404040404040404040404040404040404040404"}}"#, - ); - - let key_deploy_info = Key::DeployInfo(DeployHash::new([5; DEPLOY_HASH_LENGTH])); - check_to_json( - Some(key_deploy_info), - r#"{"cl_type":{"Option":"Key"},"parsed":{"DeployInfo":"deploy-0505050505050505050505050505050505050505050505050505050505050505"}}"#, - ); - - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"Key"},"parsed":null}"#, - ) - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - Some(uref), - r#"{"cl_type":{"Option":"URef"},"parsed":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"URef"},"parsed":null}"#, - ) - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - check_to_json( - Some(PublicKey::from( - &SecretKey::ed25519_from_bytes([7; SecretKey::ED25519_LENGTH]).unwrap(), - )), - r#"{"cl_type":{"Option":"PublicKey"},"parsed":"01ea4a6c63e29c520abef5507b132ec5f9954776aebebe7b92421eea691446d22c"}"#, - ); - check_to_json( - Some(PublicKey::from( - &SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(), - )), - r#"{"cl_type":{"Option":"PublicKey"},"parsed":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"PublicKey"},"parsed":null}"#, - ) - } - } - - mod result { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"I32"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"U32"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"Unit"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"String"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - 
Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn i32_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"I32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"U32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"Unit"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"String"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"I32","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"I32","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"I32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"U32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"Unit"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"String"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"I64","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"I64","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U8","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U8","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - 
r#"{"cl_type":{"Result":{"ok":"U32","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U32","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U32","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U64","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U64","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U128","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U128","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - 
r#"{"cl_type":{"Result":{"ok":"U256","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U256","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U256","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U512","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U512","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json( - Result::<(), i32>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"I32"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), u32>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"U32"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), ()>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"Unit"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), String>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"String"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), i32>::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::<(), u32>::Err(1), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::<(), ()>::Err(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::<(), String>::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"I32"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"U32"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"Unit"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"String"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Err(-1), - 
r#"{"cl_type":{"Result":{"ok":"String","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"String","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"String","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"I32"}},"parsed":{"Ok":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"U32"}},"parsed":{"Ok":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"Unit"}},"parsed":{"Ok":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"String"}},"parsed":{"Ok":{"Hash":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Key","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"Key","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"Key","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Key","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"I32"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"U32"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"Unit"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"String"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"URef","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"URef","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"URef","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"URef","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - let secret_key = - SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(); - let public_key = PublicKey::from(&secret_key); - check_to_json( - Result::::Ok(public_key.clone()), - 
r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"I32"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Ok(public_key.clone()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"U32"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Ok(public_key.clone()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"Unit"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Ok(public_key), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"String"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - } -} diff --git a/casper_types/src/cl_value/jsonrepr.rs b/casper_types/src/cl_value/jsonrepr.rs deleted file mode 100644 index 1b3b3e28..00000000 --- a/casper_types/src/cl_value/jsonrepr.rs +++ /dev/null @@ -1,272 +0,0 @@ -use alloc::{string::String, vec, vec::Vec}; - -use serde::Serialize; -use serde_json::{json, Value}; - -use crate::{ - bytesrepr::{self, FromBytes, OPTION_NONE_TAG, OPTION_SOME_TAG, RESULT_ERR_TAG, RESULT_OK_TAG}, - cl_type::CL_TYPE_RECURSION_DEPTH, - CLType, CLValue, Key, PublicKey, URef, U128, U256, U512, -}; - -/// Returns a best-effort attempt to convert the `CLValue` into a meaningful JSON value. 
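The long run of `check_to_json` cases above pins down the human-readable serde form of `CLValue`: a `cl_type` tag, a hex `bytes` field, and a best-effort `parsed` value, while binary serializers keep the compact `(cl_type, bytes)` pair. A sketch of both paths, using serde_json and bincode as the deleted tests do (crate-root re-exports assumed):

    use casper_types::CLValue;

    fn main() {
        let cl_value = CLValue::from_t(Some(314u32)).unwrap();

        // Human-readable serializers emit {"cl_type", "bytes", "parsed"}.
        let json = serde_json::to_value(&cl_value).unwrap();
        assert_eq!(json["cl_type"], serde_json::json!({ "Option": "U32" }));
        assert_eq!(json["parsed"], serde_json::json!(314));
        assert!(json["bytes"].is_string()); // lower-case hex of the bytesrepr payload

        // Binary serializers such as bincode round-trip the compact representation.
        let bytes = bincode::serialize(&cl_value).unwrap();
        let decoded: CLValue = bincode::deserialize(&bytes).unwrap();
        assert_eq!(cl_value, decoded);
    }
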
-pub fn cl_value_to_json(cl_value: &CLValue) -> Option { - depth_limited_to_json(0, cl_value.cl_type(), cl_value.inner_bytes()).and_then( - |(json_value, remainder)| { - if remainder.is_empty() { - Some(json_value) - } else { - None - } - }, - ) -} - -fn depth_limited_to_json<'a>( - depth: u8, - cl_type: &CLType, - bytes: &'a [u8], -) -> Option<(Value, &'a [u8])> { - if depth >= CL_TYPE_RECURSION_DEPTH { - return None; - } - let depth = depth + 1; - - match cl_type { - CLType::Bool => simple_type_to_json::(bytes), - CLType::I32 => simple_type_to_json::(bytes), - CLType::I64 => simple_type_to_json::(bytes), - CLType::U8 => simple_type_to_json::(bytes), - CLType::U32 => simple_type_to_json::(bytes), - CLType::U64 => simple_type_to_json::(bytes), - CLType::U128 => simple_type_to_json::(bytes), - CLType::U256 => simple_type_to_json::(bytes), - CLType::U512 => simple_type_to_json::(bytes), - CLType::Unit => simple_type_to_json::<()>(bytes), - CLType::String => simple_type_to_json::(bytes), - CLType::Key => simple_type_to_json::(bytes), - CLType::URef => simple_type_to_json::(bytes), - CLType::PublicKey => simple_type_to_json::(bytes), - CLType::Option(inner_cl_type) => { - let (variant, remainder) = u8::from_bytes(bytes).ok()?; - match variant { - OPTION_NONE_TAG => Some((Value::Null, remainder)), - OPTION_SOME_TAG => Some(depth_limited_to_json(depth, inner_cl_type, remainder)?), - _ => None, - } - } - CLType::List(inner_cl_type) => { - let (count, mut stream) = u32::from_bytes(bytes).ok()?; - let mut result: Vec = Vec::new(); - for _ in 0..count { - let (value, remainder) = depth_limited_to_json(depth, inner_cl_type, stream)?; - result.push(value); - stream = remainder; - } - Some((json!(result), stream)) - } - CLType::ByteArray(length) => { - let (bytes, remainder) = bytesrepr::safe_split_at(bytes, *length as usize).ok()?; - let hex_encoded_bytes = base16::encode_lower(&bytes); - Some((json![hex_encoded_bytes], remainder)) - } - CLType::Result { ok, err } => { - let (variant, remainder) = u8::from_bytes(bytes).ok()?; - match variant { - RESULT_ERR_TAG => { - let (value, remainder) = depth_limited_to_json(depth, err, remainder)?; - Some((json!({ "Err": value }), remainder)) - } - RESULT_OK_TAG => { - let (value, remainder) = depth_limited_to_json(depth, ok, remainder)?; - Some((json!({ "Ok": value }), remainder)) - } - _ => None, - } - } - CLType::Map { key, value } => { - let (num_keys, mut stream) = u32::from_bytes(bytes).ok()?; - let mut result: Vec = Vec::new(); - for _ in 0..num_keys { - let (k, remainder) = depth_limited_to_json(depth, key, stream)?; - let (v, remainder) = depth_limited_to_json(depth, value, remainder)?; - result.push(json!({"key": k, "value": v})); - stream = remainder; - } - Some((json!(result), stream)) - } - CLType::Tuple1(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - Some((json!([t1]), remainder)) - } - CLType::Tuple2(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - let (t2, remainder) = depth_limited_to_json(depth, &arr[1], remainder)?; - Some((json!([t1, t2]), remainder)) - } - CLType::Tuple3(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - let (t2, remainder) = depth_limited_to_json(depth, &arr[1], remainder)?; - let (t3, remainder) = depth_limited_to_json(depth, &arr[2], remainder)?; - Some((json!([t1, t2, t3]), remainder)) - } - CLType::Any => None, - } -} - -fn simple_type_to_json(bytes: &[u8]) -> Option<(Value, &[u8])> { - let (value, remainder) = 
T::from_bytes(bytes).ok()?; - Some((json!(value), remainder)) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{bytesrepr::ToBytes, AsymmetricType, CLTyped, SecretKey}; - use alloc::collections::BTreeMap; - - fn test_value(value: T) { - let cl_value = CLValue::from_t(value.clone()).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!(value); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn list_of_ints_to_json_value() { - test_value::>(vec![]); - test_value(vec![10u32, 12u32]); - } - - #[test] - fn list_of_bools_to_json_value() { - test_value(vec![true, false]); - } - - #[test] - fn list_of_string_to_json_value() { - test_value(vec!["rust", "python"]); - } - - #[test] - fn list_of_public_keys_to_json_value() { - let a = PublicKey::from( - &SecretKey::secp256k1_from_bytes([3; SecretKey::SECP256K1_LENGTH]).unwrap(), - ); - let b = PublicKey::from( - &SecretKey::ed25519_from_bytes([3; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let a_hex = a.to_hex(); - let b_hex = b.to_hex(); - let cl_value = CLValue::from_t(vec![a, b]).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([a_hex, b_hex]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn list_of_list_of_public_keys_to_json_value() { - let a = PublicKey::from( - &SecretKey::secp256k1_from_bytes([3; SecretKey::SECP256K1_LENGTH]).unwrap(), - ); - let b = PublicKey::from( - &SecretKey::ed25519_from_bytes([3; PublicKey::ED25519_LENGTH]).unwrap(), - ); - let c = PublicKey::from( - &SecretKey::ed25519_from_bytes([6; PublicKey::ED25519_LENGTH]).unwrap(), - ); - let a_hex = a.to_hex(); - let b_hex = b.to_hex(); - let c_hex = c.to_hex(); - let cl_value = CLValue::from_t(vec![vec![a, b], vec![c]]).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([[a_hex, b_hex], [c_hex]]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn map_of_string_to_list_of_ints_to_json_value() { - let key1 = String::from("first"); - let key2 = String::from("second"); - let value1 = vec![]; - let value2 = vec![1, 2, 3]; - let mut map: BTreeMap> = BTreeMap::new(); - map.insert(key1.clone(), value1.clone()); - map.insert(key2.clone(), value2.clone()); - let cl_value = CLValue::from_t(map).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([ - { "key": key1, "value": value1 }, - { "key": key2, "value": value2 } - ]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn option_some_of_lists_to_json_value() { - test_value(Some(vec![1, 2, 3])); - } - - #[test] - fn option_none_to_json_value() { - test_value(Option::::None); - } - - #[test] - fn bytes_to_json_value() { - let bytes = [1_u8, 2]; - let cl_value = CLValue::from_t(bytes).unwrap(); - let cl_value_as_json = cl_value_to_json(&cl_value).unwrap(); - let expected = json!(base16::encode_lower(&bytes)); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn result_ok_to_json_value() { - test_value(Result::, String>::Ok(vec![1, 2, 3])); - } - - #[test] - fn result_error_to_json_value() { - test_value(Result::, String>::Err(String::from("Upsss"))); - } - - #[test] - fn tuples_to_json_value() { - let v1 = String::from("Hello"); - let v2 = vec![1, 2, 3]; - let v3 = 1u8; - - test_value((v1.clone(),)); - test_value((v1.clone(), v2.clone())); - test_value((v1, v2, v3)); - } - - #[test] - fn json_encoding_nested_tuple_1_value_should_not_stack_overflow() { - // Returns 
a CLType corresponding to (((...(cl_type,),...),),) nested in tuples to - // `depth_limit`. - fn wrap_in_tuple1(cl_type: CLType, current_depth: usize, depth_limit: usize) -> CLType { - if current_depth == depth_limit { - return cl_type; - } - wrap_in_tuple1( - CLType::Tuple1([Box::new(cl_type)]), - current_depth + 1, - depth_limit, - ) - } - - for depth_limit in &[1, CL_TYPE_RECURSION_DEPTH as usize] { - let cl_type = wrap_in_tuple1(CLType::Unit, 1, *depth_limit); - let cl_value = CLValue::from_components(cl_type, vec![]); - assert!(cl_value_to_json(&cl_value).is_some()); - } - - for depth_limit in &[CL_TYPE_RECURSION_DEPTH as usize + 1, 1000] { - let cl_type = wrap_in_tuple1(CLType::Unit, 1, *depth_limit); - let cl_value = CLValue::from_components(cl_type, vec![]); - assert!(cl_value_to_json(&cl_value).is_none()); - } - } -} diff --git a/casper_types/src/contract_wasm.rs b/casper_types/src/contract_wasm.rs deleted file mode 100644 index aaca3817..00000000 --- a/casper_types/src/contract_wasm.rs +++ /dev/null @@ -1,372 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account, - account::TryFromSliceForAccountHashError, - bytesrepr::{Bytes, Error, FromBytes, ToBytes}, - checksummed_hex, uref, CLType, CLTyped, HashAddr, -}; - -const CONTRACT_WASM_MAX_DISPLAY_LEN: usize = 16; -const KEY_HASH_LENGTH: usize = 32; -const WASM_STRING_PREFIX: &str = "contract-wasm-"; - -/// Associated error type of `TryFrom<&[u8]>` for `ContractWasmHash`. 
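The depth-limited conversion above gives up rather than recurse without bound, so a pathologically nested `CLType` degrades to a missing `parsed` value instead of overflowing the stack. A sketch of that behaviour as observed through the JSON serializer; the `nested_tuple1` helper is illustrative only, and the exact recursion limit is assumed to lie below 100:

    use casper_types::{CLType, CLValue};

    // Builds (((...((),),...),),): CLType::Unit wrapped in `depth` Tuple1 layers.
    fn nested_tuple1(depth: usize) -> CLType {
        (0..depth).fold(CLType::Unit, |inner, _| CLType::Tuple1([Box::new(inner)]))
    }

    fn main() {
        // Within the recursion limit the `parsed` field is populated...
        let shallow = CLValue::from_components(nested_tuple1(10), vec![]);
        let json = serde_json::to_value(&shallow).unwrap();
        assert!(!json["parsed"].is_null());

        // ...while an over-deep type yields `parsed: null` instead of a stack overflow.
        let deep = CLValue::from_components(nested_tuple1(100), vec![]);
        let json = serde_json::to_value(&deep).unwrap();
        assert!(json["parsed"].is_null());
    }
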
-#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - InvalidPrefix, - Hex(base16::DecodeError), - Account(TryFromSliceForAccountHashError), - Hash(TryFromSliceError), - AccountHash(account::FromStrError), - URef(uref::FromStrError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceForAccountHashError) -> Self { - FromStrError::Account(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From for FromStrError { - fn from(error: account::FromStrError) -> Self { - FromStrError::AccountHash(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Account(error) => write!(f, "account from string error: {:?}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::AccountHash(error) => { - write!(f, "account hash from string error: {:?}", error) - } - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - } - } -} - -/// A newtype wrapping a `HashAddr` which is the raw bytes of -/// the ContractWasmHash -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractWasmHash(HashAddr); - -impl ContractWasmHash { - /// Constructs a new `ContractWasmHash` from the raw bytes of the contract wasm hash. - pub const fn new(value: HashAddr) -> ContractWasmHash { - ContractWasmHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractWasmHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", WASM_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractWasmHash`. 
- pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(WASM_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = HashAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(ContractWasmHash(bytes)) - } -} - -impl Display for ContractWasmHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractWasmHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractWasmHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractWasmHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractWasmHash { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractWasmHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractWasmHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractWasmHash { - fn from(bytes: [u8; 32]) -> Self { - ContractWasmHash(bytes) - } -} - -impl Serialize for ContractWasmHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractWasmHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractWasmHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractWasmHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractWasmHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractWasmHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractWasmHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractWasmHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractWasmHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractWasmHash { - fn schema_name() -> String { - String::from("ContractWasmHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = - Some("The hash address of the contract wasm".to_string()); - schema_object.into() - } -} - -/// A container for contract's WASM bytes. 
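`ContractWasmHash` prints and parses a prefixed hex form for human-facing contexts, the same string its human-readable serde impls use. A usage sketch, assuming the type is re-exported at the crate root:

    use casper_types::ContractWasmHash;

    fn main() {
        let hash = ContractWasmHash::new([7; 32]);

        // Formatted form: the raw 32-byte hash, hex encoded, behind "contract-wasm-".
        let formatted = hash.to_formatted_string();
        assert!(formatted.starts_with("contract-wasm-"));

        // Parsing accepts only that prefix followed by valid hex of the right length.
        assert_eq!(ContractWasmHash::from_formatted_str(&formatted).unwrap(), hash);
        assert!(ContractWasmHash::from_formatted_str("contract-wasm-zz").is_err());
    }
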
-#[derive(PartialEq, Eq, Clone, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractWasm { - bytes: Bytes, -} - -impl Debug for ContractWasm { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - if self.bytes.len() > CONTRACT_WASM_MAX_DISPLAY_LEN { - write!( - f, - "ContractWasm(0x{}...)", - base16::encode_lower(&self.bytes[..CONTRACT_WASM_MAX_DISPLAY_LEN]) - ) - } else { - write!(f, "ContractWasm(0x{})", base16::encode_lower(&self.bytes)) - } - } -} - -impl ContractWasm { - /// Creates new WASM object from bytes. - pub fn new(bytes: Vec) -> Self { - ContractWasm { - bytes: bytes.into(), - } - } - - /// Consumes instance of [`ContractWasm`] and returns its bytes. - pub fn take_bytes(self) -> Vec { - self.bytes.into() - } - - /// Returns a slice of contained WASM bytes. - pub fn bytes(&self) -> &[u8] { - self.bytes.as_ref() - } -} - -impl ToBytes for ContractWasm { - fn to_bytes(&self) -> Result, Error> { - self.bytes.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.bytes.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.bytes.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractWasm { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem1) = FromBytes::from_bytes(bytes)?; - Ok((ContractWasm { bytes }, rem1)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn test_debug_repr_of_short_wasm() { - const SIZE: usize = 8; - let wasm_bytes = vec![0; SIZE]; - let contract_wasm = ContractWasm::new(wasm_bytes); - // String output is less than the bytes itself - assert_eq!( - format!("{:?}", contract_wasm), - "ContractWasm(0x0000000000000000)" - ); - } - - #[test] - fn test_debug_repr_of_long_wasm() { - const SIZE: usize = 65; - let wasm_bytes = vec![0; SIZE]; - let contract_wasm = ContractWasm::new(wasm_bytes); - // String output is less than the bytes itself - assert_eq!( - format!("{:?}", contract_wasm), - "ContractWasm(0x00000000000000000000000000000000...)" - ); - } - - #[test] - fn contract_wasm_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = - HashAddr::try_from(&bytes[..]).expect("should create contract wasm hash"); - let contract_hash = ContractWasmHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_wasm_hash_from_str() { - let contract_hash = ContractWasmHash([3; 32]); - let encoded = contract_hash.to_formatted_string(); - let decoded = ContractWasmHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_hash, decoded); - - let invalid_prefix = - "contractwasm-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "contract-wasm-00000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(ContractWasmHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn contract_wasm_hash_serde_roundtrip() { - let contract_hash = ContractWasmHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = 
bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_wasm_hash_json_roundtrip() { - let contract_hash = ContractWasmHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } -} diff --git a/casper_types/src/contracts.rs b/casper_types/src/contracts.rs deleted file mode 100644 index 4c39a798..00000000 --- a/casper_types/src/contracts.rs +++ /dev/null @@ -1,2106 +0,0 @@ -//! Data types for supporting contract headers feature. -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{ - collections::{BTreeMap, BTreeSet}, - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account, - account::TryFromSliceForAccountHashError, - bytesrepr::{self, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}, - checksummed_hex, - contract_wasm::ContractWasmHash, - uref, - uref::URef, - CLType, CLTyped, ContextAccessRights, HashAddr, Key, ProtocolVersion, KEY_HASH_LENGTH, -}; - -/// Maximum number of distinct user groups. -pub const MAX_GROUPS: u8 = 10; -/// Maximum number of URefs which can be assigned across all user groups. -pub const MAX_TOTAL_UREFS: usize = 100; - -const CONTRACT_STRING_PREFIX: &str = "contract-"; -const PACKAGE_STRING_PREFIX: &str = "contract-package-"; -// We need to support the legacy prefix of "contract-package-wasm". -const PACKAGE_STRING_LEGACY_EXTRA_PREFIX: &str = "wasm"; - -/// Set of errors which may happen when working with contract headers. -#[derive(Debug, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Attempt to override an existing or previously existing version with a - /// new header (this is not allowed to ensure immutability of a given - /// version). - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(1, Error::PreviouslyUsedVersion as u8); - /// ``` - PreviouslyUsedVersion = 1, - /// Attempted to disable a contract that does not exist. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(2, Error::ContractNotFound as u8); - /// ``` - ContractNotFound = 2, - /// Attempted to create a user group which already exists (use the update - /// function to change an existing user group). - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(3, Error::GroupAlreadyExists as u8); - /// ``` - GroupAlreadyExists = 3, - /// Attempted to add a new user group which exceeds the allowed maximum - /// number of groups. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(4, Error::MaxGroupsExceeded as u8); - /// ``` - MaxGroupsExceeded = 4, - /// Attempted to add a new URef to a group, which resulted in the total - /// number of URefs across all user groups to exceed the allowed maximum. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(5, Error::MaxTotalURefsExceeded as u8); - /// ``` - MaxTotalURefsExceeded = 5, - /// Attempted to remove a URef from a group, which does not exist in the - /// group. 
- /// group. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(6, Error::GroupDoesNotExist as u8); - /// ``` - GroupDoesNotExist = 6, - /// Attempted to remove unknown URef from the group. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(7, Error::UnableToRemoveURef as u8); - /// ``` - UnableToRemoveURef = 7, - /// Group is use by at least one active contract. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(8, Error::GroupInUse as u8); - /// ``` - GroupInUse = 8, - /// URef already exists in given group. - /// ``` - /// # use casper_types::contracts::Error; - /// assert_eq!(9, Error::URefAlreadyExists as u8); - /// ``` - URefAlreadyExists = 9, -} - -impl TryFrom<u8> for Error { - type Error = (); - - fn try_from(value: u8) -> Result<Self, Self::Error> { - let error = match value { - v if v == Self::PreviouslyUsedVersion as u8 => Self::PreviouslyUsedVersion, - v if v == Self::ContractNotFound as u8 => Self::ContractNotFound, - v if v == Self::GroupAlreadyExists as u8 => Self::GroupAlreadyExists, - v if v == Self::MaxGroupsExceeded as u8 => Self::MaxGroupsExceeded, - v if v == Self::MaxTotalURefsExceeded as u8 => Self::MaxTotalURefsExceeded, - v if v == Self::GroupDoesNotExist as u8 => Self::GroupDoesNotExist, - v if v == Self::UnableToRemoveURef as u8 => Self::UnableToRemoveURef, - v if v == Self::GroupInUse as u8 => Self::GroupInUse, - v if v == Self::URefAlreadyExists as u8 => Self::URefAlreadyExists, - _ => return Err(()), - }; - Ok(error) - } -} - -/// Associated error type of `TryFrom<&[u8]>` for `ContractHash`. -#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -impl Display for TryFromSliceForContractHashError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "failed to retrieve from slice") - } -} - -/// An error from parsing a formatted contract string -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Invalid formatted string prefix. - InvalidPrefix, - /// Error when decoding a hex string - Hex(base16::DecodeError), - /// Error when parsing an account - Account(TryFromSliceForAccountHashError), - /// Error when parsing the hash. - Hash(TryFromSliceError), - /// Error when parsing an account hash. - AccountHash(account::FromStrError), - /// Error when parsing an uref. - URef(uref::FromStrError), -} - -impl From<base16::DecodeError> for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From<TryFromSliceForAccountHashError> for FromStrError { - fn from(error: TryFromSliceForAccountHashError) -> Self { - FromStrError::Account(error) - } -} - -impl From<TryFromSliceError> for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From<account::FromStrError> for FromStrError { - fn from(error: account::FromStrError) -> Self { - FromStrError::AccountHash(error) - } -} - -impl From<uref::FromStrError> for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Account(error) => write!(f, "account from string error: {:?}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::AccountHash(error) => { - write!(f, "account hash from string error: {:?}", error) - } - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - } - } -} - -/// A (labelled) "user group".
Each method of a versioned contract may be -/// associated with one or more user groups which are allowed to call it. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Group(String); - -impl Group { - /// Basic constructor - pub fn new>(s: T) -> Self { - Group(s.into()) - } - - /// Retrieves underlying name. - pub fn value(&self) -> &str { - &self.0 - } -} - -impl From for String { - fn from(group: Group) -> Self { - group.0 - } -} - -impl ToBytes for Group { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.value().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Group { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - String::from_bytes(bytes).map(|(label, bytes)| (Group(label), bytes)) - } -} - -/// Automatically incremented value for a contract version within a major `ProtocolVersion`. -pub type ContractVersion = u32; - -/// Within each discrete major `ProtocolVersion`, contract version resets to this value. -pub const CONTRACT_INITIAL_VERSION: ContractVersion = 1; - -/// Major element of `ProtocolVersion` a `ContractVersion` is compatible with. -pub type ProtocolVersionMajor = u32; - -/// Major element of `ProtocolVersion` combined with `ContractVersion`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractVersionKey(ProtocolVersionMajor, ContractVersion); - -impl ContractVersionKey { - /// Returns a new instance of ContractVersionKey with provided values. - pub fn new( - protocol_version_major: ProtocolVersionMajor, - contract_version: ContractVersion, - ) -> Self { - Self(protocol_version_major, contract_version) - } - - /// Returns the major element of the protocol version this contract is compatible with. - pub fn protocol_version_major(self) -> ProtocolVersionMajor { - self.0 - } - - /// Returns the contract version within the protocol major version. - pub fn contract_version(self) -> ContractVersion { - self.1 - } -} - -impl From for (ProtocolVersionMajor, ContractVersion) { - fn from(contract_version_key: ContractVersionKey) -> Self { - (contract_version_key.0, contract_version_key.1) - } -} - -/// Serialized length of `ContractVersionKey`. 
-pub const CONTRACT_VERSION_KEY_SERIALIZED_LENGTH: usize = - U32_SERIALIZED_LENGTH + U32_SERIALIZED_LENGTH; - -impl ToBytes for ContractVersionKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.0.to_bytes()?); - ret.append(&mut self.1.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - CONTRACT_VERSION_KEY_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - self.1.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractVersionKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (major, rem): (u32, &[u8]) = FromBytes::from_bytes(bytes)?; - let (contract, rem): (ContractVersion, &[u8]) = FromBytes::from_bytes(rem)?; - Ok((ContractVersionKey::new(major, contract), rem)) - } -} - -impl fmt::Display for ContractVersionKey { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}.{}", self.0, self.1) - } -} - -/// Collection of contract versions. -pub type ContractVersions = BTreeMap; - -/// Collection of disabled contract versions. The runtime will not permit disabled -/// contract versions to be executed. -pub type DisabledVersions = BTreeSet; - -/// Collection of named groups. -pub type Groups = BTreeMap>; - -/// A newtype wrapping a `HashAddr` which references a [`Contract`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractHash(HashAddr); - -impl ContractHash { - /// Constructs a new `ContractHash` from the raw bytes of the contract hash. - pub const fn new(value: HashAddr) -> ContractHash { - ContractHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - CONTRACT_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractHash`. 
- pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(CONTRACT_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = HashAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(ContractHash(bytes)) - } -} - -impl Display for ContractHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for ContractHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractHash { - fn from(bytes: [u8; 32]) -> Self { - ContractHash(bytes) - } -} - -impl Serialize for ContractHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractHash { - fn schema_name() -> String { - String::from("ContractHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("The hash address of the contract".to_string()); - schema_object.into() - } -} - -/// A newtype wrapping a `HashAddr` which references a [`ContractPackage`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractPackageHash(HashAddr); - -impl ContractPackageHash { - /// Constructs a new `ContractPackageHash` from the raw bytes of the contract package hash. - pub const fn new(value: HashAddr) -> ContractPackageHash { - ContractPackageHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. 
- pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractPackageHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", PACKAGE_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractPackageHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(PACKAGE_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - - let hex_addr = remainder - .strip_prefix(PACKAGE_STRING_LEGACY_EXTRA_PREFIX) - .unwrap_or(remainder); - - let bytes = HashAddr::try_from(checksummed_hex::decode(hex_addr)?.as_ref())?; - Ok(ContractPackageHash(bytes)) - } -} - -impl Display for ContractPackageHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractPackageHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractPackageHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractPackageHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractPackageHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for ContractPackageHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractPackageHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractPackageHash { - fn from(bytes: [u8; 32]) -> Self { - ContractPackageHash(bytes) - } -} - -impl Serialize for ContractPackageHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractPackageHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractPackageHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractPackageHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractPackageHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractPackageHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractPackageHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractPackageHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractPackageHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractPackageHash { - fn schema_name() -> String { - String::from("ContractPackageHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let 
mut schema_object = schema.into_object(); - schema_object.metadata().description = - Some("The hash address of the contract package".to_string()); - schema_object.into() - } -} - -/// A enum to determine the lock status of the contract package. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum ContractPackageStatus { - /// The package is locked and cannot be versioned. - Locked, - /// The package is unlocked and can be versioned. - Unlocked, -} - -impl ContractPackageStatus { - /// Create a new status flag based on a boolean value - pub fn new(is_locked: bool) -> Self { - if is_locked { - ContractPackageStatus::Locked - } else { - ContractPackageStatus::Unlocked - } - } -} - -impl Default for ContractPackageStatus { - fn default() -> Self { - Self::Unlocked - } -} - -impl ToBytes for ContractPackageStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - match self { - ContractPackageStatus::Unlocked => result.append(&mut false.to_bytes()?), - ContractPackageStatus::Locked => result.append(&mut true.to_bytes()?), - } - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - ContractPackageStatus::Unlocked => false.serialized_length(), - ContractPackageStatus::Locked => true.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ContractPackageStatus::Locked => writer.push(u8::from(true)), - ContractPackageStatus::Unlocked => writer.push(u8::from(false)), - } - Ok(()) - } -} - -impl FromBytes for ContractPackageStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (val, bytes) = bool::from_bytes(bytes)?; - let status = ContractPackageStatus::new(val); - Ok((status, bytes)) - } -} - -/// Contract definition, metadata, and security container. -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractPackage { - /// Key used to add or disable versions - access_key: URef, - /// All versions (enabled & disabled) - versions: ContractVersions, - /// Disabled versions - disabled_versions: DisabledVersions, - /// Mapping maintaining the set of URefs associated with each "user - /// group". This can be used to control access to methods in a particular - /// version of the contract. A method is callable by any context which - /// "knows" any of the URefs associated with the method's user group. - groups: Groups, - /// A flag that determines whether a contract is locked - lock_status: ContractPackageStatus, -} - -impl CLTyped for ContractPackage { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ContractPackage { - /// Create new `ContractPackage` (with no versions) from given access key. - pub fn new( - access_key: URef, - versions: ContractVersions, - disabled_versions: DisabledVersions, - groups: Groups, - lock_status: ContractPackageStatus, - ) -> Self { - ContractPackage { - access_key, - versions, - disabled_versions, - groups, - lock_status, - } - } - - /// Get the access key for this contract. - pub fn access_key(&self) -> URef { - self.access_key - } - - /// Get the mutable group definitions for this contract. - pub fn groups_mut(&mut self) -> &mut Groups { - &mut self.groups - } - - /// Get the group definitions for this contract. 
- pub fn groups(&self) -> &Groups { - &self.groups - } - - /// Adds new group to this contract. - pub fn add_group(&mut self, group: Group, urefs: BTreeSet) { - let v = self.groups.entry(group).or_default(); - v.extend(urefs) - } - - /// Lookup the contract hash for a given contract version (if present) - pub fn lookup_contract_hash( - &self, - contract_version_key: ContractVersionKey, - ) -> Option<&ContractHash> { - if !self.is_version_enabled(contract_version_key) { - return None; - } - self.versions.get(&contract_version_key) - } - - /// Returns `true` if the given contract version exists and is enabled. - pub fn is_version_enabled(&self, contract_version_key: ContractVersionKey) -> bool { - !self.disabled_versions.contains(&contract_version_key) - && self.versions.contains_key(&contract_version_key) - } - - /// Returns `true` if the given contract hash exists and is enabled. - pub fn is_contract_enabled(&self, contract_hash: &ContractHash) -> bool { - match self.find_contract_version_key_by_hash(contract_hash) { - Some(version_key) => !self.disabled_versions.contains(version_key), - None => false, - } - } - - /// Insert a new contract version; the next sequential version number will be issued. - pub fn insert_contract_version( - &mut self, - protocol_version_major: ProtocolVersionMajor, - contract_hash: ContractHash, - ) -> ContractVersionKey { - let contract_version = self.next_contract_version_for(protocol_version_major); - let key = ContractVersionKey::new(protocol_version_major, contract_version); - self.versions.insert(key, contract_hash); - key - } - - /// Disable the contract version corresponding to the given hash (if it exists). - pub fn disable_contract_version(&mut self, contract_hash: ContractHash) -> Result<(), Error> { - let contract_version_key = self - .find_contract_version_key_by_hash(&contract_hash) - .copied() - .ok_or(Error::ContractNotFound)?; - - if !self.disabled_versions.contains(&contract_version_key) { - self.disabled_versions.insert(contract_version_key); - } - - Ok(()) - } - - /// Enable the contract version corresponding to the given hash (if it exists). - pub fn enable_contract_version(&mut self, contract_hash: ContractHash) -> Result<(), Error> { - let contract_version_key = self - .find_contract_version_key_by_hash(&contract_hash) - .copied() - .ok_or(Error::ContractNotFound)?; - - self.disabled_versions.remove(&contract_version_key); - - Ok(()) - } - - fn find_contract_version_key_by_hash( - &self, - contract_hash: &ContractHash, - ) -> Option<&ContractVersionKey> { - self.versions - .iter() - .filter_map(|(k, v)| if v == contract_hash { Some(k) } else { None }) - .next() - } - - /// Returns reference to all of this contract's versions. - pub fn versions(&self) -> &ContractVersions { - &self.versions - } - - /// Returns all of this contract's enabled contract versions. - pub fn enabled_versions(&self) -> ContractVersions { - let mut ret = ContractVersions::new(); - for version in &self.versions { - if !self.is_version_enabled(*version.0) { - continue; - } - ret.insert(*version.0, *version.1); - } - ret - } - - /// Returns mutable reference to all of this contract's versions (enabled and disabled). - pub fn versions_mut(&mut self) -> &mut ContractVersions { - &mut self.versions - } - - /// Consumes the object and returns all of this contract's versions (enabled and disabled). - pub fn take_versions(self) -> ContractVersions { - self.versions - } - - /// Returns all of this contract's disabled versions. 
- pub fn disabled_versions(&self) -> &DisabledVersions { - &self.disabled_versions - } - - /// Returns mut reference to all of this contract's disabled versions. - pub fn disabled_versions_mut(&mut self) -> &mut DisabledVersions { - &mut self.disabled_versions - } - - /// Removes a group from this contract (if it exists). - pub fn remove_group(&mut self, group: &Group) -> bool { - self.groups.remove(group).is_some() - } - - /// Gets the next available contract version for the given protocol version - fn next_contract_version_for(&self, protocol_version: ProtocolVersionMajor) -> ContractVersion { - let current_version = self - .versions - .keys() - .rev() - .find_map(|&contract_version_key| { - if contract_version_key.protocol_version_major() == protocol_version { - Some(contract_version_key.contract_version()) - } else { - None - } - }) - .unwrap_or(0); - - current_version + 1 - } - - /// Return the contract version key for the newest enabled contract version. - pub fn current_contract_version(&self) -> Option { - self.enabled_versions().keys().next_back().copied() - } - - /// Return the contract hash for the newest enabled contract version. - pub fn current_contract_hash(&self) -> Option { - self.enabled_versions().values().next_back().copied() - } - - /// Return the lock status of the contract package. - pub fn is_locked(&self) -> bool { - match self.lock_status { - ContractPackageStatus::Unlocked => false, - ContractPackageStatus::Locked => true, - } - } - - /// Return the package status itself - pub fn get_lock_status(&self) -> ContractPackageStatus { - self.lock_status.clone() - } -} - -impl ToBytes for ContractPackage { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.access_key().write_bytes(&mut result)?; - self.versions().write_bytes(&mut result)?; - self.disabled_versions().write_bytes(&mut result)?; - self.groups().write_bytes(&mut result)?; - self.lock_status.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.access_key.serialized_length() - + self.versions.serialized_length() - + self.disabled_versions.serialized_length() - + self.groups.serialized_length() - + self.lock_status.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.access_key().write_bytes(writer)?; - self.versions().write_bytes(writer)?; - self.disabled_versions().write_bytes(writer)?; - self.groups().write_bytes(writer)?; - self.lock_status.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractPackage { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (access_key, bytes) = URef::from_bytes(bytes)?; - let (versions, bytes) = ContractVersions::from_bytes(bytes)?; - let (disabled_versions, bytes) = DisabledVersions::from_bytes(bytes)?; - let (groups, bytes) = Groups::from_bytes(bytes)?; - let (lock_status, bytes) = ContractPackageStatus::from_bytes(bytes)?; - let result = ContractPackage { - access_key, - versions, - disabled_versions, - groups, - lock_status, - }; - - Ok((result, bytes)) - } -} - -/// Type alias for a container used inside [`EntryPoints`]. 
-pub type EntryPointsMap = BTreeMap; - -/// Collection of named entry points -#[derive(Debug, Clone, PartialEq, Eq, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct EntryPoints(EntryPointsMap); - -impl Default for EntryPoints { - fn default() -> Self { - let mut entry_points = EntryPoints::new(); - let entry_point = EntryPoint::default(); - entry_points.add_entry_point(entry_point); - entry_points - } -} - -impl ToBytes for EntryPoints { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for EntryPoints { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (entry_points_map, rem) = EntryPointsMap::from_bytes(bytes)?; - Ok((EntryPoints(entry_points_map), rem)) - } -} - -impl EntryPoints { - /// Creates empty instance of [`EntryPoints`]. - pub fn new() -> EntryPoints { - EntryPoints(EntryPointsMap::new()) - } - - /// Adds new [`EntryPoint`]. - pub fn add_entry_point(&mut self, entry_point: EntryPoint) { - self.0.insert(entry_point.name().to_string(), entry_point); - } - - /// Checks if given [`EntryPoint`] exists. - pub fn has_entry_point(&self, entry_point_name: &str) -> bool { - self.0.contains_key(entry_point_name) - } - - /// Gets an existing [`EntryPoint`] by its name. - pub fn get(&self, entry_point_name: &str) -> Option<&EntryPoint> { - self.0.get(entry_point_name) - } - - /// Returns iterator for existing entry point names. - pub fn keys(&self) -> impl Iterator { - self.0.keys() - } - - /// Takes all entry points. - pub fn take_entry_points(self) -> Vec { - self.0.into_values().collect() - } - - /// Returns the length of the entry points - pub fn len(&self) -> usize { - self.0.len() - } - - /// Checks if the `EntryPoints` is empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl From> for EntryPoints { - fn from(entry_points: Vec) -> EntryPoints { - let entries = entry_points - .into_iter() - .map(|entry_point| (String::from(entry_point.name()), entry_point)) - .collect(); - EntryPoints(entries) - } -} - -/// Collection of named keys -pub type NamedKeys = BTreeMap; - -/// Methods and type signatures supported by a contract. -#[derive(Debug, Clone, PartialEq, Eq, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Contract { - contract_package_hash: ContractPackageHash, - contract_wasm_hash: ContractWasmHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, -} - -impl From - for ( - ContractPackageHash, - ContractWasmHash, - NamedKeys, - EntryPoints, - ProtocolVersion, - ) -{ - fn from(contract: Contract) -> Self { - ( - contract.contract_package_hash, - contract.contract_wasm_hash, - contract.named_keys, - contract.entry_points, - contract.protocol_version, - ) - } -} - -impl Contract { - /// `Contract` constructor. 
- pub fn new( - contract_package_hash: ContractPackageHash, - contract_wasm_hash: ContractWasmHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, - ) -> Self { - Contract { - contract_package_hash, - contract_wasm_hash, - named_keys, - entry_points, - protocol_version, - } - } - - /// Hash for accessing contract package - pub fn contract_package_hash(&self) -> ContractPackageHash { - self.contract_package_hash - } - - /// Hash for accessing contract WASM - pub fn contract_wasm_hash(&self) -> ContractWasmHash { - self.contract_wasm_hash - } - - /// Checks whether there is a method with the given name - pub fn has_entry_point(&self, name: &str) -> bool { - self.entry_points.has_entry_point(name) - } - - /// Returns the type signature for the given `method`. - pub fn entry_point(&self, method: &str) -> Option<&EntryPoint> { - self.entry_points.get(method) - } - - /// Get the protocol version this header is targeting. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Adds new entry point - pub fn add_entry_point>(&mut self, entry_point: EntryPoint) { - self.entry_points.add_entry_point(entry_point); - } - - /// Hash for accessing contract bytes - pub fn contract_wasm_key(&self) -> Key { - self.contract_wasm_hash.into() - } - - /// Returns immutable reference to methods - pub fn entry_points(&self) -> &EntryPoints { - &self.entry_points - } - - /// Takes `named_keys` - pub fn take_named_keys(self) -> NamedKeys { - self.named_keys - } - - /// Returns a reference to `named_keys` - pub fn named_keys(&self) -> &NamedKeys { - &self.named_keys - } - - /// Appends `keys` to `named_keys` - pub fn named_keys_append(&mut self, keys: &mut NamedKeys) { - self.named_keys.append(keys); - } - - /// Removes given named key. - pub fn remove_named_key(&mut self, key: &str) -> Option { - self.named_keys.remove(key) - } - - /// Set protocol_version. - pub fn set_protocol_version(&mut self, protocol_version: ProtocolVersion) { - self.protocol_version = protocol_version; - } - - /// Determines if `Contract` is compatible with a given `ProtocolVersion`. - pub fn is_compatible_protocol_version(&self, protocol_version: ProtocolVersion) -> bool { - self.protocol_version.value().major == protocol_version.value().major - } - - /// Extracts the access rights from the named keys of the contract. 
- pub fn extract_access_rights(&self, contract_hash: ContractHash) -> ContextAccessRights { - let urefs_iter = self - .named_keys - .values() - .filter_map(|key| key.as_uref().copied()); - ContextAccessRights::new(contract_hash.into(), urefs_iter) - } -} - -impl ToBytes for Contract { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.contract_package_hash().write_bytes(&mut result)?; - self.contract_wasm_hash().write_bytes(&mut result)?; - self.named_keys().write_bytes(&mut result)?; - self.entry_points().write_bytes(&mut result)?; - self.protocol_version().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - ToBytes::serialized_length(&self.entry_points) - + ToBytes::serialized_length(&self.contract_package_hash) - + ToBytes::serialized_length(&self.contract_wasm_hash) - + ToBytes::serialized_length(&self.protocol_version) - + ToBytes::serialized_length(&self.named_keys) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.contract_package_hash().write_bytes(writer)?; - self.contract_wasm_hash().write_bytes(writer)?; - self.named_keys().write_bytes(writer)?; - self.entry_points().write_bytes(writer)?; - self.protocol_version().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Contract { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (contract_package_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (contract_wasm_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (named_keys, bytes) = NamedKeys::from_bytes(bytes)?; - let (entry_points, bytes) = EntryPoints::from_bytes(bytes)?; - let (protocol_version, bytes) = ProtocolVersion::from_bytes(bytes)?; - Ok(( - Contract { - contract_package_hash, - contract_wasm_hash, - named_keys, - entry_points, - protocol_version, - }, - bytes, - )) - } -} - -impl Default for Contract { - fn default() -> Self { - Contract { - named_keys: NamedKeys::default(), - entry_points: EntryPoints::default(), - contract_wasm_hash: [0; KEY_HASH_LENGTH].into(), - contract_package_hash: [0; KEY_HASH_LENGTH].into(), - protocol_version: ProtocolVersion::V1_0_0, - } - } -} - -/// Context of method execution -#[repr(u8)] -#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum EntryPointType { - /// Runs as session code - Session = 0, - /// Runs within contract's context - Contract = 1, -} - -impl ToBytes for EntryPointType { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - (*self as u8).to_bytes() - } - - fn serialized_length(&self) -> usize { - 1 - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(*self as u8); - Ok(()) - } -} - -impl FromBytes for EntryPointType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, bytes) = u8::from_bytes(bytes)?; - match value { - 0 => Ok((EntryPointType::Session, bytes)), - 1 => Ok((EntryPointType::Contract, bytes)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -/// Default name for an entry point -pub const DEFAULT_ENTRY_POINT_NAME: &str = "call"; - -/// Default name for an installer entry point -pub const ENTRY_POINT_NAME_INSTALL: &str = "install"; - -/// Default name for an upgrade entry point -pub const UPGRADE_ENTRY_POINT_NAME: &str = "upgrade"; - -/// Collection of entry point parameters. 
-pub type Parameters = Vec<Parameter>; - -/// Type signature of a method. Order of arguments matter since can be -/// referenced by index as well as name. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct EntryPoint { - name: String, - args: Parameters, - ret: CLType, - access: EntryPointAccess, - entry_point_type: EntryPointType, -} - -impl From<EntryPoint> for (String, Parameters, CLType, EntryPointAccess, EntryPointType) { - fn from(entry_point: EntryPoint) -> Self { - ( - entry_point.name, - entry_point.args, - entry_point.ret, - entry_point.access, - entry_point.entry_point_type, - ) - } -} - -impl EntryPoint { - /// `EntryPoint` constructor. - pub fn new<T: Into<String>>( - name: T, - args: Parameters, - ret: CLType, - access: EntryPointAccess, - entry_point_type: EntryPointType, - ) -> Self { - EntryPoint { - name: name.into(), - args, - ret, - access, - entry_point_type, - } - } - - /// Create a default [`EntryPoint`] with specified name. - pub fn default_with_name<T: Into<String>>(name: T) -> Self { - EntryPoint { - name: name.into(), - ..Default::default() - } - } - - /// Get name. - pub fn name(&self) -> &str { - &self.name - } - - /// Get access enum. - pub fn access(&self) -> &EntryPointAccess { - &self.access - } - - /// Get the arguments for this method. - pub fn args(&self) -> &[Parameter] { - self.args.as_slice() - } - - /// Get the return type. - pub fn ret(&self) -> &CLType { - &self.ret - } - - /// Obtains entry point - pub fn entry_point_type(&self) -> EntryPointType { - self.entry_point_type - } -} - -impl Default for EntryPoint { - /// constructor for a public session `EntryPoint` that takes no args and returns `Unit` - fn default() -> Self { - EntryPoint { - name: DEFAULT_ENTRY_POINT_NAME.to_string(), - args: Vec::new(), - ret: CLType::Unit, - access: EntryPointAccess::Public, - entry_point_type: EntryPointType::Session, - } - } -} - -impl ToBytes for EntryPoint { - fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.name.to_bytes()?); - result.append(&mut self.args.to_bytes()?); - self.ret.append_bytes(&mut result)?; - result.append(&mut self.access.to_bytes()?); - result.append(&mut self.entry_point_type.to_bytes()?); - - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.name.serialized_length() - + self.args.serialized_length() - + self.ret.serialized_length() - + self.access.serialized_length() - + self.entry_point_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> { - self.name().write_bytes(writer)?; - self.args.write_bytes(writer)?; - self.ret.append_bytes(writer)?; - self.access().write_bytes(writer)?; - self.entry_point_type().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for EntryPoint { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, bytes) = String::from_bytes(bytes)?; - let (args, bytes) = Vec::<Parameter>::from_bytes(bytes)?; - let (ret, bytes) = CLType::from_bytes(bytes)?; - let (access, bytes) = EntryPointAccess::from_bytes(bytes)?; - let (entry_point_type, bytes) = EntryPointType::from_bytes(bytes)?; - - Ok(( - EntryPoint { - name, - args, - ret, - access, - entry_point_type, - }, - bytes, - )) - } -} - -/// Enum describing the possible access control options for a contract entry -/// point (method).
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum EntryPointAccess { - /// Anyone can call this method (no access controls). - Public, - /// Only users from the listed groups may call this method. Note: if the - /// list is empty then this method is not callable from outside the - /// contract. - Groups(Vec), -} - -const ENTRYPOINTACCESS_PUBLIC_TAG: u8 = 1; -const ENTRYPOINTACCESS_GROUPS_TAG: u8 = 2; - -impl EntryPointAccess { - /// Constructor for access granted to only listed groups. - pub fn groups(labels: &[&str]) -> Self { - let list: Vec = labels.iter().map(|s| Group(String::from(*s))).collect(); - EntryPointAccess::Groups(list) - } -} - -impl ToBytes for EntryPointAccess { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - - match self { - EntryPointAccess::Public => { - result.push(ENTRYPOINTACCESS_PUBLIC_TAG); - } - EntryPointAccess::Groups(groups) => { - result.push(ENTRYPOINTACCESS_GROUPS_TAG); - result.append(&mut groups.to_bytes()?); - } - } - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - EntryPointAccess::Public => 1, - EntryPointAccess::Groups(groups) => 1 + groups.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - EntryPointAccess::Public => { - writer.push(ENTRYPOINTACCESS_PUBLIC_TAG); - } - EntryPointAccess::Groups(groups) => { - writer.push(ENTRYPOINTACCESS_GROUPS_TAG); - groups.write_bytes(writer)?; - } - } - Ok(()) - } -} - -impl FromBytes for EntryPointAccess { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, bytes) = u8::from_bytes(bytes)?; - - match tag { - ENTRYPOINTACCESS_PUBLIC_TAG => Ok((EntryPointAccess::Public, bytes)), - ENTRYPOINTACCESS_GROUPS_TAG => { - let (groups, bytes) = Vec::::from_bytes(bytes)?; - let result = EntryPointAccess::Groups(groups); - Ok((result, bytes)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -/// Parameter to a method -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Parameter { - name: String, - cl_type: CLType, -} - -impl Parameter { - /// `Parameter` constructor. - pub fn new>(name: T, cl_type: CLType) -> Self { - Parameter { - name: name.into(), - cl_type, - } - } - - /// Get the type of this argument. - pub fn cl_type(&self) -> &CLType { - &self.cl_type - } - - /// Get a reference to the parameter's name. 
- pub fn name(&self) -> &str { - &self.name - } -} - -impl From for (String, CLType) { - fn from(parameter: Parameter) -> Self { - (parameter.name, parameter.cl_type) - } -} - -impl ToBytes for Parameter { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = ToBytes::to_bytes(&self.name)?; - self.cl_type.append_bytes(&mut result)?; - - Ok(result) - } - - fn serialized_length(&self) -> usize { - ToBytes::serialized_length(&self.name) + self.cl_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.name.write_bytes(writer)?; - self.cl_type.append_bytes(writer) - } -} - -impl FromBytes for Parameter { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, bytes) = String::from_bytes(bytes)?; - let (cl_type, bytes) = CLType::from_bytes(bytes)?; - - Ok((Parameter { name, cl_type }, bytes)) - } -} - -#[cfg(test)] -mod tests { - use std::iter::FromIterator; - - use super::*; - use crate::{AccessRights, URef, UREF_ADDR_LENGTH}; - use alloc::borrow::ToOwned; - - const CONTRACT_HASH_V1: ContractHash = ContractHash::new([42; 32]); - const CONTRACT_HASH_V2: ContractHash = ContractHash::new([84; 32]); - - fn make_contract_package() -> ContractPackage { - let mut contract_package = ContractPackage::new( - URef::new([0; 32], AccessRights::NONE), - ContractVersions::default(), - DisabledVersions::default(), - Groups::default(), - ContractPackageStatus::default(), - ); - - // add groups - { - let group_urefs = { - let mut ret = BTreeSet::new(); - ret.insert(URef::new([1; 32], AccessRights::READ)); - ret - }; - - contract_package - .groups_mut() - .insert(Group::new("Group 1"), group_urefs.clone()); - - contract_package - .groups_mut() - .insert(Group::new("Group 2"), group_urefs); - } - - // add entry_points - let _entry_points = { - let mut ret = BTreeMap::new(); - let entrypoint = EntryPoint::new( - "method0".to_string(), - vec![], - CLType::U32, - EntryPointAccess::groups(&["Group 2"]), - EntryPointType::Session, - ); - ret.insert(entrypoint.name().to_owned(), entrypoint); - let entrypoint = EntryPoint::new( - "method1".to_string(), - vec![Parameter::new("Foo", CLType::U32)], - CLType::U32, - EntryPointAccess::groups(&["Group 1"]), - EntryPointType::Session, - ); - ret.insert(entrypoint.name().to_owned(), entrypoint); - ret - }; - - let _contract_package_hash = [41; 32]; - let _contract_wasm_hash = [43; 32]; - let _named_keys = NamedKeys::new(); - let protocol_version = ProtocolVersion::V1_0_0; - - let v1 = contract_package - .insert_contract_version(protocol_version.value().major, CONTRACT_HASH_V1); - let v2 = contract_package - .insert_contract_version(protocol_version.value().major, CONTRACT_HASH_V2); - - assert!(v2 > v1); - - contract_package - } - - #[test] - fn next_contract_version() { - let major = 1; - let mut contract_package = ContractPackage::new( - URef::new([0; 32], AccessRights::NONE), - ContractVersions::default(), - DisabledVersions::default(), - Groups::default(), - ContractPackageStatus::default(), - ); - assert_eq!(contract_package.next_contract_version_for(major), 1); - - let next_version = contract_package.insert_contract_version(major, [123; 32].into()); - assert_eq!(next_version, ContractVersionKey::new(major, 1)); - assert_eq!(contract_package.next_contract_version_for(major), 2); - let next_version_2 = contract_package.insert_contract_version(major, [124; 32].into()); - assert_eq!(next_version_2, ContractVersionKey::new(major, 2)); - - let major = 2; - 
assert_eq!(contract_package.next_contract_version_for(major), 1); - let next_version_3 = contract_package.insert_contract_version(major, [42; 32].into()); - assert_eq!(next_version_3, ContractVersionKey::new(major, 1)); - } - - #[test] - fn roundtrip_serialization() { - let contract_package = make_contract_package(); - let bytes = contract_package.to_bytes().expect("should serialize"); - let (decoded_package, rem) = - ContractPackage::from_bytes(&bytes).expect("should deserialize"); - assert_eq!(contract_package, decoded_package); - assert_eq!(rem.len(), 0); - } - - #[test] - fn should_remove_group() { - let mut contract_package = make_contract_package(); - - assert!(!contract_package.remove_group(&Group::new("Non-existent group"))); - assert!(contract_package.remove_group(&Group::new("Group 1"))); - assert!(!contract_package.remove_group(&Group::new("Group 1"))); // Group no longer exists - } - - #[test] - fn should_disable_and_enable_contract_version() { - const NEW_CONTRACT_HASH: ContractHash = ContractHash::new([123; 32]); - - let mut contract_package = make_contract_package(); - - assert!( - !contract_package.is_contract_enabled(&NEW_CONTRACT_HASH), - "nonexisting contract contract should return false" - ); - - assert_eq!( - contract_package.current_contract_version(), - Some(ContractVersionKey(1, 2)) - ); - assert_eq!( - contract_package.current_contract_hash(), - Some(CONTRACT_HASH_V2) - ); - - assert_eq!( - contract_package.versions(), - &BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2) - ]), - ); - assert_eq!( - contract_package.enabled_versions(), - BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2) - ]), - ); - - assert!(!contract_package.is_contract_enabled(&NEW_CONTRACT_HASH)); - - assert_eq!( - contract_package.disable_contract_version(NEW_CONTRACT_HASH), - Err(Error::ContractNotFound), - "should return contract not found error" - ); - - assert!( - !contract_package.is_contract_enabled(&NEW_CONTRACT_HASH), - "disabling missing contract shouldnt change outcome" - ); - - let next_version = contract_package.insert_contract_version(1, NEW_CONTRACT_HASH); - assert!( - contract_package.is_version_enabled(next_version), - "version should exist and be enabled" - ); - assert!( - contract_package.is_contract_enabled(&NEW_CONTRACT_HASH), - "contract should be enabled" - ); - - assert_eq!( - contract_package.disable_contract_version(NEW_CONTRACT_HASH), - Ok(()), - "should be able to disable version" - ); - assert!(!contract_package.is_contract_enabled(&NEW_CONTRACT_HASH)); - - assert_eq!( - contract_package.lookup_contract_hash(next_version), - None, - "should not return disabled contract version" - ); - - assert!( - !contract_package.is_version_enabled(next_version), - "version should not be enabled" - ); - - assert_eq!( - contract_package.current_contract_version(), - Some(ContractVersionKey(1, 2)) - ); - assert_eq!( - contract_package.current_contract_hash(), - Some(CONTRACT_HASH_V2) - ); - assert_eq!( - contract_package.versions(), - &BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2), - (next_version, NEW_CONTRACT_HASH), - ]), - ); - assert_eq!( - contract_package.enabled_versions(), - BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2), - ]), - ); - assert_eq!( - contract_package.disabled_versions(), - 
&BTreeSet::from_iter([next_version]), - ); - - assert_eq!( - contract_package.current_contract_version(), - Some(ContractVersionKey(1, 2)) - ); - assert_eq!( - contract_package.current_contract_hash(), - Some(CONTRACT_HASH_V2) - ); - - assert_eq!( - contract_package.disable_contract_version(CONTRACT_HASH_V2), - Ok(()), - "should be able to disable version 2" - ); - - assert_eq!( - contract_package.enabled_versions(), - BTreeMap::from_iter([(ContractVersionKey(1, 1), CONTRACT_HASH_V1),]), - ); - - assert_eq!( - contract_package.current_contract_version(), - Some(ContractVersionKey(1, 1)) - ); - assert_eq!( - contract_package.current_contract_hash(), - Some(CONTRACT_HASH_V1) - ); - - assert_eq!( - contract_package.disabled_versions(), - &BTreeSet::from_iter([next_version, ContractVersionKey(1, 2)]), - ); - - assert_eq!( - contract_package.enable_contract_version(CONTRACT_HASH_V2), - Ok(()), - ); - - assert_eq!( - contract_package.enabled_versions(), - BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2), - ]), - ); - - assert_eq!( - contract_package.disabled_versions(), - &BTreeSet::from_iter([next_version]) - ); - - assert_eq!( - contract_package.current_contract_hash(), - Some(CONTRACT_HASH_V2) - ); - - assert_eq!( - contract_package.enable_contract_version(NEW_CONTRACT_HASH), - Ok(()), - ); - - assert_eq!( - contract_package.enable_contract_version(NEW_CONTRACT_HASH), - Ok(()), - "enabling a contract twice should be a noop" - ); - - assert_eq!( - contract_package.enabled_versions(), - BTreeMap::from_iter([ - (ContractVersionKey(1, 1), CONTRACT_HASH_V1), - (ContractVersionKey(1, 2), CONTRACT_HASH_V2), - (next_version, NEW_CONTRACT_HASH), - ]), - ); - - assert_eq!(contract_package.disabled_versions(), &BTreeSet::new(),); - - assert_eq!( - contract_package.current_contract_hash(), - Some(NEW_CONTRACT_HASH) - ); - } - - #[test] - fn should_not_allow_to_enable_non_existing_version() { - let mut contract_package = make_contract_package(); - - assert_eq!( - contract_package.enable_contract_version(ContractHash::default()), - Err(Error::ContractNotFound), - ); - } - - #[test] - fn contract_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = HashAddr::try_from(&bytes[..]).expect("should create contract hash"); - let contract_hash = ContractHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_package_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = HashAddr::try_from(&bytes[..]).expect("should create contract hash"); - let contract_hash = ContractPackageHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_hash_from_str() { - let contract_hash = ContractHash([3; 32]); - let encoded = contract_hash.to_formatted_string(); - let decoded = ContractHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_hash, decoded); - - let invalid_prefix = - "contract--0000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "contract-00000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "contract-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - 
"contract-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(ContractHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn contract_package_hash_from_str() { - let contract_package_hash = ContractPackageHash([3; 32]); - let encoded = contract_package_hash.to_formatted_string(); - let decoded = ContractPackageHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_package_hash, decoded); - - let invalid_prefix = - "contract-package0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } - - #[test] - fn contract_package_hash_from_legacy_str() { - let contract_package_hash = ContractPackageHash([3; 32]); - let hex_addr = contract_package_hash.to_string(); - let legacy_encoded = format!("contract-package-wasm{}", hex_addr); - let decoded_from_legacy = ContractPackageHash::from_formatted_str(&legacy_encoded) - .expect("should accept legacy prefixed string"); - assert_eq!( - contract_package_hash, decoded_from_legacy, - "decoded_from_legacy should equal decoded" - ); - - let invalid_prefix = - "contract-packagewasm0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-wasm00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } - - #[test] - fn contract_hash_serde_roundtrip() { - let contract_hash = ContractHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_hash_json_roundtrip() { - let contract_hash = ContractHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } - - #[test] - fn contract_package_hash_serde_roundtrip() { - let contract_hash = ContractPackageHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = 
bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_package_hash_json_roundtrip() { - let contract_hash = ContractPackageHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } - - #[test] - fn should_extract_access_rights() { - let contract_hash = ContractHash([255; 32]); - let uref = URef::new([84; UREF_ADDR_LENGTH], AccessRights::READ_ADD); - let uref_r = URef::new([42; UREF_ADDR_LENGTH], AccessRights::READ); - let uref_a = URef::new([42; UREF_ADDR_LENGTH], AccessRights::ADD); - let uref_w = URef::new([42; UREF_ADDR_LENGTH], AccessRights::WRITE); - let mut named_keys = NamedKeys::new(); - named_keys.insert("a".to_string(), Key::URef(uref_r)); - named_keys.insert("b".to_string(), Key::URef(uref_a)); - named_keys.insert("c".to_string(), Key::URef(uref_w)); - named_keys.insert("d".to_string(), Key::URef(uref)); - let contract = Contract::new( - ContractPackageHash::new([254; 32]), - ContractWasmHash::new([253; 32]), - named_keys, - EntryPoints::default(), - ProtocolVersion::V1_0_0, - ); - let access_rights = contract.extract_access_rights(contract_hash); - let expected_uref = URef::new([42; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - assert!( - access_rights.has_access_rights_to_uref(&uref), - "urefs in named keys should be included in access rights" - ); - assert!( - access_rights.has_access_rights_to_uref(&expected_uref), - "multiple access right bits to the same uref should coalesce" - ); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - // #![proptest_config(ProptestConfig { - // cases: 1024, - // .. ProptestConfig::default() - // })] - - #[test] - fn test_value_contract(contract in gens::contract_arb()) { - bytesrepr::test_serialization_roundtrip(&contract); - } - - #[test] - fn test_value_contract_package(contract_pkg in gens::contract_package_arb()) { - bytesrepr::test_serialization_roundtrip(&contract_pkg); - } - } -} diff --git a/casper_types/src/crypto.rs b/casper_types/src/crypto.rs deleted file mode 100644 index fbcd172c..00000000 --- a/casper_types/src/crypto.rs +++ /dev/null @@ -1,35 +0,0 @@ -//! 
Cryptographic types and operations on them - -mod asymmetric_key; -mod error; - -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; - -use crate::key::BLAKE2B_DIGEST_LENGTH; -#[cfg(any(feature = "std", test))] -pub use asymmetric_key::generate_ed25519_keypair; -#[cfg(any(feature = "testing", feature = "gens", test))] -pub use asymmetric_key::gens; -pub use asymmetric_key::{ - sign, verify, AsymmetricType, PublicKey, SecretKey, Signature, ED25519_TAG, SECP256K1_TAG, - SYSTEM_ACCOUNT, SYSTEM_TAG, -}; -pub use error::Error; -#[cfg(any(feature = "std", test))] -pub use error::ErrorExt; - -#[doc(hidden)] -pub fn blake2b>(data: T) -> [u8; BLAKE2B_DIGEST_LENGTH] { - let mut result = [0; BLAKE2B_DIGEST_LENGTH]; - // NOTE: Assumed safe as `BLAKE2B_DIGEST_LENGTH` is a valid value for a hasher - let mut hasher = VarBlake2b::new(BLAKE2B_DIGEST_LENGTH).expect("should create hasher"); - - hasher.update(data); - hasher.finalize_variable(|slice| { - result.copy_from_slice(slice); - }); - result -} diff --git a/casper_types/src/crypto/asymmetric_key.rs b/casper_types/src/crypto/asymmetric_key.rs deleted file mode 100644 index 5c82289f..00000000 --- a/casper_types/src/crypto/asymmetric_key.rs +++ /dev/null @@ -1,1274 +0,0 @@ -//! Asymmetric key types and methods on them - -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - cmp::Ordering, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - hash::{Hash, Hasher}, - iter, - marker::Copy, -}; -#[cfg(any(feature = "std", test))] -use std::path::Path; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use derp::{Der, Tag}; -use ed25519_dalek::{ - Signature as Ed25519Signature, SigningKey as Ed25519SecretKey, - VerifyingKey as Ed25519PublicKey, PUBLIC_KEY_LENGTH as ED25519_PUBLIC_KEY_LENGTH, - SECRET_KEY_LENGTH as ED25519_SECRET_KEY_LENGTH, SIGNATURE_LENGTH as ED25519_SIGNATURE_LENGTH, -}; -use hex_fmt::HexFmt; -use k256::ecdsa::{ - signature::{Signer, Verifier}, - Signature as Secp256k1Signature, SigningKey as Secp256k1SecretKey, - VerifyingKey as Secp256k1PublicKey, -}; -#[cfg(any(feature = "std", test))] -use once_cell::sync::Lazy; -#[cfg(any(feature = "std", test))] -use pem::Pem; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use rand::{Rng, RngCore}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -#[cfg(feature = "json-schema")] -use serde_json::json; -#[cfg(any(feature = "std", test))] -use untrusted::Input; - -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -use crate::{ - account::AccountHash, - bytesrepr, - bytesrepr::{FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - checksummed_hex, - crypto::Error, - CLType, CLTyped, Tagged, -}; -#[cfg(any(feature = "std", test))] -use crate::{ - crypto::ErrorExt, - file_utils::{read_file, write_file, write_private_file}, -}; - -#[cfg(any(feature = "testing", test))] -pub mod gens; -#[cfg(test)] -mod tests; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for system variant. -pub const SYSTEM_TAG: u8 = 0; -const SYSTEM: &str = "System"; - -/// Tag for ed25519 variant. -pub const ED25519_TAG: u8 = 1; -const ED25519: &str = "Ed25519"; - -/// Tag for secp256k1 variant. 
-pub const SECP256K1_TAG: u8 = 2; -const SECP256K1: &str = "Secp256k1"; - -const SECP256K1_SECRET_KEY_LENGTH: usize = 32; -const SECP256K1_COMPRESSED_PUBLIC_KEY_LENGTH: usize = 33; -const SECP256K1_SIGNATURE_LENGTH: usize = 64; - -/// Public key for system account. -pub const SYSTEM_ACCOUNT: PublicKey = PublicKey::System; - -// See https://www.secg.org/sec1-v2.pdf#subsection.C.4 -#[cfg(any(feature = "std", test))] -const EC_PUBLIC_KEY_OBJECT_IDENTIFIER: [u8; 7] = [42, 134, 72, 206, 61, 2, 1]; - -// See https://tools.ietf.org/html/rfc8410#section-10.3 -#[cfg(any(feature = "std", test))] -const ED25519_OBJECT_IDENTIFIER: [u8; 3] = [43, 101, 112]; -#[cfg(any(feature = "std", test))] -const ED25519_PEM_SECRET_KEY_TAG: &str = "PRIVATE KEY"; -#[cfg(any(feature = "std", test))] -const ED25519_PEM_PUBLIC_KEY_TAG: &str = "PUBLIC KEY"; - -// Ref? -#[cfg(any(feature = "std", test))] -const SECP256K1_OBJECT_IDENTIFIER: [u8; 5] = [43, 129, 4, 0, 10]; -#[cfg(any(feature = "std", test))] -const SECP256K1_PEM_SECRET_KEY_TAG: &str = "EC PRIVATE KEY"; -#[cfg(any(feature = "std", test))] -const SECP256K1_PEM_PUBLIC_KEY_TAG: &str = "PUBLIC KEY"; - -#[cfg(any(feature = "std", test))] -static ED25519_SECRET_KEY: Lazy = Lazy::new(|| { - let bytes = [15u8; SecretKey::ED25519_LENGTH]; - SecretKey::ed25519_from_bytes(bytes).unwrap() -}); - -#[cfg(any(feature = "std", test))] -static ED25519_PUBLIC_KEY: Lazy = Lazy::new(|| { - let bytes = [15u8; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - PublicKey::from(&secret_key) -}); - -/// Operations on asymmetric cryptographic type. -pub trait AsymmetricType<'a> -where - Self: 'a + Sized + Tagged, - Vec: From<&'a Self>, -{ - /// Converts `self` to hex, where the first byte represents the algorithm tag. - fn to_hex(&'a self) -> String { - let bytes = iter::once(self.tag()) - .chain(Vec::::from(self)) - .collect::>(); - base16::encode_lower(&bytes) - } - - /// Tries to decode `Self` from its hex-representation. The hex format should be as produced - /// by `AsymmetricType::to_hex()`. - fn from_hex>(input: A) -> Result { - if input.as_ref().len() < 2 { - return Err(Error::AsymmetricKey( - "failed to decode from hex: too short".to_string(), - )); - } - - let (tag_hex, key_hex) = input.as_ref().split_at(2); - - let tag = checksummed_hex::decode(tag_hex)?; - let key_bytes = checksummed_hex::decode(key_hex)?; - - match tag[0] { - SYSTEM_TAG => { - if key_bytes.is_empty() { - Ok(Self::system()) - } else { - Err(Error::AsymmetricKey( - "failed to decode from hex: invalid system variant".to_string(), - )) - } - } - ED25519_TAG => Self::ed25519_from_bytes(&key_bytes), - SECP256K1_TAG => Self::secp256k1_from_bytes(&key_bytes), - _ => Err(Error::AsymmetricKey(format!( - "failed to decode from hex: invalid tag. Expected {}, {} or {}, got {}", - SYSTEM_TAG, ED25519_TAG, SECP256K1_TAG, tag[0] - ))), - } - } - - /// Constructs a new system variant. - fn system() -> Self; - - /// Constructs a new ed25519 variant from a byte slice. - fn ed25519_from_bytes>(bytes: T) -> Result; - - /// Constructs a new secp256k1 variant from a byte slice. - fn secp256k1_from_bytes>(bytes: T) -> Result; -} - -/// A secret or private asymmetric key. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum SecretKey { - /// System secret key. - System, - /// Ed25519 secret key. - #[cfg_attr(feature = "datasize", data_size(skip))] - // Manually verified to have no data on the heap. - Ed25519(Ed25519SecretKey), - /// secp256k1 secret key. 
- #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1SecretKey), -} - -impl SecretKey { - /// The length in bytes of a system secret key. - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 secret key. - pub const ED25519_LENGTH: usize = ED25519_SECRET_KEY_LENGTH; - - /// The length in bytes of a secp256k1 secret key. - pub const SECP256K1_LENGTH: usize = SECP256K1_SECRET_KEY_LENGTH; - - /// Constructs a new system variant. - pub fn system() -> Self { - SecretKey::System - } - - /// Constructs a new ed25519 variant from a byte slice. - pub fn ed25519_from_bytes>(bytes: T) -> Result { - Ok(SecretKey::Ed25519(Ed25519SecretKey::try_from( - bytes.as_ref(), - )?)) - } - - /// Constructs a new secp256k1 variant from a byte slice. - pub fn secp256k1_from_bytes>(bytes: T) -> Result { - Ok(SecretKey::Secp256k1( - Secp256k1SecretKey::from_slice(bytes.as_ref()).map_err(|_| Error::SignatureError)?, - )) - } - - fn variant_name(&self) -> &str { - match self { - SecretKey::System => SYSTEM, - SecretKey::Ed25519(_) => ED25519, - SecretKey::Secp256k1(_) => SECP256K1, - } - } -} - -#[cfg(any(feature = "std", test))] -impl SecretKey { - /// Generates a new ed25519 variant using the system's secure random number generator. - pub fn generate_ed25519() -> Result { - let mut bytes = [0u8; Self::ED25519_LENGTH]; - getrandom::getrandom(&mut bytes[..])?; - SecretKey::ed25519_from_bytes(bytes).map_err(Into::into) - } - - /// Generates a new secp256k1 variant using the system's secure random number generator. - pub fn generate_secp256k1() -> Result { - let mut bytes = [0u8; Self::SECP256K1_LENGTH]; - getrandom::getrandom(&mut bytes[..])?; - SecretKey::secp256k1_from_bytes(bytes).map_err(Into::into) - } - - /// Attempts to write the key bytes to the configured file path. - pub fn to_file>(&self, file: P) -> Result<(), ErrorExt> { - write_private_file(file, self.to_pem()?).map_err(ErrorExt::SecretKeySave) - } - - /// Attempts to read the key bytes from configured file path. - pub fn from_file>(file: P) -> Result { - let data = read_file(file).map_err(ErrorExt::SecretKeyLoad)?; - Self::from_pem(data) - } - - /// DER encodes a key. - pub fn to_der(&self) -> Result, ErrorExt> { - match self { - SecretKey::System => Err(Error::System(String::from("to_der")).into()), - SecretKey::Ed25519(secret_key) => { - // See https://tools.ietf.org/html/rfc8410#section-10.3 - let mut key_bytes = vec![]; - let mut der = Der::new(&mut key_bytes); - der.octet_string(&secret_key.to_bytes())?; - - let mut encoded = vec![]; - der = Der::new(&mut encoded); - der.sequence(|der| { - der.integer(&[0])?; - der.sequence(|der| der.oid(&ED25519_OBJECT_IDENTIFIER))?; - der.octet_string(&key_bytes) - })?; - Ok(encoded) - } - SecretKey::Secp256k1(secret_key) => { - // See https://www.secg.org/sec1-v2.pdf#subsection.C.4 - let mut oid_bytes = vec![]; - let mut der = Der::new(&mut oid_bytes); - der.oid(&SECP256K1_OBJECT_IDENTIFIER)?; - - let mut encoded = vec![]; - der = Der::new(&mut encoded); - der.sequence(|der| { - der.integer(&[1])?; - der.octet_string(secret_key.to_bytes().as_slice())?; - der.element(Tag::ContextSpecificConstructed0, &oid_bytes) - })?; - Ok(encoded) - } - } - } - - /// Decodes a key from a DER-encoded slice. - pub fn from_der>(input: T) -> Result { - let input = Input::from(input.as_ref()); - - let (key_type_tag, raw_bytes) = input.read_all(derp::Error::Read, |input| { - derp::nested(input, Tag::Sequence, |input| { - // Safe to ignore the first value which should be an integer. 
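// The version integer read below distinguishes the two supported encodings: RFC 8410 Ed25519 keys use version 0, SEC1 secp256k1 keys use version 1.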
- let version_slice = - derp::expect_tag_and_get_value(input, Tag::Integer)?.as_slice_less_safe(); - if version_slice.len() != 1 { - return Err(derp::Error::NonZeroUnusedBits); - } - let version = version_slice[0]; - - // Read the next value. - let (tag, value) = derp::read_tag_and_get_value(input)?; - if tag == Tag::Sequence as u8 { - // Expecting an Ed25519 key. - if version != 0 { - return Err(derp::Error::WrongValue); - } - - // The sequence should have one element: an object identifier defining Ed25519. - let object_identifier = value.read_all(derp::Error::Read, |input| { - derp::expect_tag_and_get_value(input, Tag::Oid) - })?; - if object_identifier.as_slice_less_safe() != ED25519_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - // The third and final value should be the raw bytes of the secret key as an - // octet string in an octet string. - let raw_bytes = derp::nested(input, Tag::OctetString, |input| { - derp::expect_tag_and_get_value(input, Tag::OctetString) - })? - .as_slice_less_safe(); - - return Ok((ED25519_TAG, raw_bytes)); - } else if tag == Tag::OctetString as u8 { - // Expecting a secp256k1 key. - if version != 1 { - return Err(derp::Error::WrongValue); - } - - // The octet string is the secret key. - let raw_bytes = value.as_slice_less_safe(); - - // The object identifier is next. - let parameter0 = - derp::expect_tag_and_get_value(input, Tag::ContextSpecificConstructed0)?; - let object_identifier = parameter0.read_all(derp::Error::Read, |input| { - derp::expect_tag_and_get_value(input, Tag::Oid) - })?; - if object_identifier.as_slice_less_safe() != SECP256K1_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - // There might be an optional public key as the final value, but we're not - // interested in parsing that. Read it to ensure `input.read_all` doesn't fail - // with unused bytes error. - let _ = derp::read_tag_and_get_value(input); - - return Ok((SECP256K1_TAG, raw_bytes)); - } - - Err(derp::Error::WrongValue) - }) - })?; - - match key_type_tag { - SYSTEM_TAG => Err(Error::AsymmetricKey("cannot construct variant".to_string()).into()), - ED25519_TAG => SecretKey::ed25519_from_bytes(raw_bytes).map_err(Into::into), - SECP256K1_TAG => SecretKey::secp256k1_from_bytes(raw_bytes).map_err(Into::into), - _ => Err(Error::AsymmetricKey("unknown type tag".to_string()).into()), - } - } - - /// PEM encodes a key. - pub fn to_pem(&self) -> Result { - let tag = match self { - SecretKey::System => return Err(Error::System(String::from("to_pem")).into()), - SecretKey::Ed25519(_) => ED25519_PEM_SECRET_KEY_TAG.to_string(), - SecretKey::Secp256k1(_) => SECP256K1_PEM_SECRET_KEY_TAG.to_string(), - }; - let contents = self.to_der()?; - let pem = Pem { tag, contents }; - Ok(pem::encode(&pem)) - } - - /// Decodes a key from a PEM-encoded slice. 
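/// The PEM tag must match the decoded key's algorithm: `PRIVATE KEY` for Ed25519 and `EC PRIVATE KEY` for secp256k1; any other combination is rejected.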
- pub fn from_pem>(input: T) -> Result { - let pem = pem::parse(input)?; - - let secret_key = Self::from_der(&pem.contents)?; - - let bad_tag = |expected_tag: &str| { - ErrorExt::FromPem(format!( - "invalid tag: expected {}, got {}", - expected_tag, pem.tag - )) - }; - - match secret_key { - SecretKey::System => return Err(Error::System(String::from("from_pem")).into()), - SecretKey::Ed25519(_) => { - if pem.tag != ED25519_PEM_SECRET_KEY_TAG { - return Err(bad_tag(ED25519_PEM_SECRET_KEY_TAG)); - } - } - SecretKey::Secp256k1(_) => { - if pem.tag != SECP256K1_PEM_SECRET_KEY_TAG { - return Err(bad_tag(SECP256K1_PEM_SECRET_KEY_TAG)); - } - } - } - - Ok(secret_key) - } - - /// Generates a random instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - Self::random_ed25519(rng) - } else { - Self::random_secp256k1(rng) - } - } - - /// Generates a random ed25519 instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random_ed25519(rng: &mut TestRng) -> Self { - let mut bytes = [0u8; Self::ED25519_LENGTH]; - rng.fill_bytes(&mut bytes[..]); - SecretKey::ed25519_from_bytes(bytes).unwrap() - } - - /// Generates a random secp256k1 instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random_secp256k1(rng: &mut TestRng) -> Self { - let mut bytes = [0u8; Self::SECP256K1_LENGTH]; - rng.fill_bytes(&mut bytes[..]); - SecretKey::secp256k1_from_bytes(bytes).unwrap() - } - - /// Returns an example value for documentation purposes. - pub fn doc_example() -> &'static Self { - &ED25519_SECRET_KEY - } -} - -impl Debug for SecretKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!(formatter, "SecretKey::{}", self.variant_name()) - } -} - -impl Display for SecretKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - ::fmt(self, formatter) - } -} - -impl Tagged for SecretKey { - fn tag(&self) -> u8 { - match self { - SecretKey::System => SYSTEM_TAG, - SecretKey::Ed25519(_) => ED25519_TAG, - SecretKey::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -/// A public asymmetric key. -#[derive(Clone, Eq, PartialEq)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum PublicKey { - /// System public key. - System, - /// Ed25519 public key. - #[cfg_attr(feature = "datasize", data_size(skip))] - Ed25519(Ed25519PublicKey), - /// secp256k1 public key. - #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1PublicKey), -} - -impl PublicKey { - /// The length in bytes of a system public key. - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 public key. - pub const ED25519_LENGTH: usize = ED25519_PUBLIC_KEY_LENGTH; - - /// The length in bytes of a secp256k1 public key. - pub const SECP256K1_LENGTH: usize = SECP256K1_COMPRESSED_PUBLIC_KEY_LENGTH; - - /// Creates an `AccountHash` from a given `PublicKey` instance. - pub fn to_account_hash(&self) -> AccountHash { - AccountHash::from(self) - } - - /// Returns `true` if this public key is of the `System` variant. - pub fn is_system(&self) -> bool { - matches!(self, PublicKey::System) - } - - fn variant_name(&self) -> &str { - match self { - PublicKey::System => SYSTEM, - PublicKey::Ed25519(_) => ED25519, - PublicKey::Secp256k1(_) => SECP256K1, - } - } -} - -#[cfg(any(feature = "std", test))] -impl PublicKey { - /// Generates a new ed25519 variant using the system's secure random number generator. 
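/// Panics if the operating system's random number generator fails.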
- pub fn generate_ed25519() -> Result { - let mut bytes = [0u8; Self::ED25519_LENGTH]; - getrandom::getrandom(&mut bytes[..]).expect("RNG failure!"); - PublicKey::ed25519_from_bytes(bytes).map_err(Into::into) - } - - /// Generates a new secp256k1 variant using the system's secure random number generator. - pub fn generate_secp256k1() -> Result { - let mut bytes = [0u8; Self::SECP256K1_LENGTH]; - getrandom::getrandom(&mut bytes[..]).expect("RNG failure!"); - PublicKey::secp256k1_from_bytes(bytes).map_err(Into::into) - } - - /// Attempts to write the key bytes to the configured file path. - pub fn to_file>(&self, file: P) -> Result<(), ErrorExt> { - write_file(file, self.to_pem()?).map_err(ErrorExt::PublicKeySave) - } - - /// Attempts to read the key bytes from configured file path. - pub fn from_file>(file: P) -> Result { - let data = read_file(file).map_err(ErrorExt::PublicKeyLoad)?; - Self::from_pem(data) - } - - /// DER encodes a key. - pub fn to_der(&self) -> Result, ErrorExt> { - match self { - PublicKey::System => Err(Error::System(String::from("to_der")).into()), - PublicKey::Ed25519(public_key) => { - // See https://tools.ietf.org/html/rfc8410#section-10.1 - let mut encoded = vec![]; - let mut der = Der::new(&mut encoded); - der.sequence(|der| { - der.sequence(|der| der.oid(&ED25519_OBJECT_IDENTIFIER))?; - der.bit_string(0, public_key.as_ref()) - })?; - Ok(encoded) - } - PublicKey::Secp256k1(public_key) => { - // See https://www.secg.org/sec1-v2.pdf#subsection.C.3 - let mut encoded = vec![]; - let mut der = Der::new(&mut encoded); - der.sequence(|der| { - der.sequence(|der| { - der.oid(&EC_PUBLIC_KEY_OBJECT_IDENTIFIER)?; - der.oid(&SECP256K1_OBJECT_IDENTIFIER) - })?; - der.bit_string(0, public_key.to_encoded_point(true).as_ref()) - })?; - Ok(encoded) - } - } - } - - /// Decodes a key from a DER-encoded slice. - pub fn from_der>(input: T) -> Result { - let input = Input::from(input.as_ref()); - - let mut key_type_tag = ED25519_TAG; - let raw_bytes = input.read_all(derp::Error::Read, |input| { - derp::nested(input, Tag::Sequence, |input| { - derp::nested(input, Tag::Sequence, |input| { - // Read the first value. - let object_identifier = - derp::expect_tag_and_get_value(input, Tag::Oid)?.as_slice_less_safe(); - if object_identifier == ED25519_OBJECT_IDENTIFIER { - key_type_tag = ED25519_TAG; - Ok(()) - } else if object_identifier == EC_PUBLIC_KEY_OBJECT_IDENTIFIER { - // Assert the next object identifier is the secp256k1 ID. - let next_object_identifier = - derp::expect_tag_and_get_value(input, Tag::Oid)?.as_slice_less_safe(); - if next_object_identifier != SECP256K1_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - key_type_tag = SECP256K1_TAG; - Ok(()) - } else { - Err(derp::Error::WrongValue) - } - })?; - Ok(derp::bit_string_with_no_unused_bits(input)?.as_slice_less_safe()) - }) - })?; - - match key_type_tag { - ED25519_TAG => PublicKey::ed25519_from_bytes(raw_bytes).map_err(Into::into), - SECP256K1_TAG => PublicKey::secp256k1_from_bytes(raw_bytes).map_err(Into::into), - _ => unreachable!(), - } - } - - /// PEM encodes a key. - pub fn to_pem(&self) -> Result { - let tag = match self { - PublicKey::System => return Err(Error::System(String::from("to_pem")).into()), - PublicKey::Ed25519(_) => ED25519_PEM_PUBLIC_KEY_TAG.to_string(), - PublicKey::Secp256k1(_) => SECP256K1_PEM_PUBLIC_KEY_TAG.to_string(), - }; - let contents = self.to_der()?; - let pem = Pem { tag, contents }; - Ok(pem::encode(&pem)) - } - - /// Decodes a key from a PEM-encoded slice. 
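/// As with secret keys, the PEM tag is checked against the decoded key's algorithm; both Ed25519 and secp256k1 public keys use the `PUBLIC KEY` tag.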
- pub fn from_pem>(input: T) -> Result { - let pem = pem::parse(input)?; - let public_key = Self::from_der(&pem.contents)?; - let bad_tag = |expected_tag: &str| { - ErrorExt::FromPem(format!( - "invalid tag: expected {}, got {}", - expected_tag, pem.tag - )) - }; - match public_key { - PublicKey::System => return Err(Error::System(String::from("from_pem")).into()), - PublicKey::Ed25519(_) => { - if pem.tag != ED25519_PEM_PUBLIC_KEY_TAG { - return Err(bad_tag(ED25519_PEM_PUBLIC_KEY_TAG)); - } - } - PublicKey::Secp256k1(_) => { - if pem.tag != SECP256K1_PEM_PUBLIC_KEY_TAG { - return Err(bad_tag(SECP256K1_PEM_PUBLIC_KEY_TAG)); - } - } - } - Ok(public_key) - } - - /// Generates a random instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random(rng); - PublicKey::from(&secret_key) - } - - /// Generates a random ed25519 instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random_ed25519(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random_ed25519(rng); - PublicKey::from(&secret_key) - } - - /// Generates a random secp256k1 instance using a `TestRng`. - #[cfg(any(feature = "testing", test))] - pub fn random_secp256k1(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random_secp256k1(rng); - PublicKey::from(&secret_key) - } - - /// Returns an example value for documentation purposes. - pub fn doc_example() -> &'static Self { - &ED25519_PUBLIC_KEY - } -} - -impl AsymmetricType<'_> for PublicKey { - fn system() -> Self { - PublicKey::System - } - - fn ed25519_from_bytes>(bytes: T) -> Result { - Ok(PublicKey::Ed25519(Ed25519PublicKey::try_from( - bytes.as_ref(), - )?)) - } - - fn secp256k1_from_bytes>(bytes: T) -> Result { - Ok(PublicKey::Secp256k1( - Secp256k1PublicKey::from_sec1_bytes(bytes.as_ref()) - .map_err(|_| Error::SignatureError)?, - )) - } -} - -impl From<&SecretKey> for PublicKey { - fn from(secret_key: &SecretKey) -> PublicKey { - match secret_key { - SecretKey::System => PublicKey::System, - SecretKey::Ed25519(secret_key) => PublicKey::Ed25519(secret_key.into()), - SecretKey::Secp256k1(secret_key) => PublicKey::Secp256k1(secret_key.into()), - } - } -} - -impl From<&PublicKey> for Vec { - fn from(public_key: &PublicKey) -> Self { - match public_key { - PublicKey::System => Vec::new(), - PublicKey::Ed25519(key) => key.to_bytes().into(), - PublicKey::Secp256k1(key) => key.to_encoded_point(true).as_ref().into(), - } - } -} - -impl From for Vec { - fn from(public_key: PublicKey) -> Self { - Vec::::from(&public_key) - } -} - -impl Debug for PublicKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "PublicKey::{}({})", - self.variant_name(), - base16::encode_lower(&Into::>::into(self)) - ) - } -} - -impl Display for PublicKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "PubKey::{}({:10})", - self.variant_name(), - HexFmt(Into::>::into(self)) - ) - } -} - -impl PartialOrd for PublicKey { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for PublicKey { - fn cmp(&self, other: &Self) -> Ordering { - let self_tag = self.tag(); - let other_tag = other.tag(); - if self_tag == other_tag { - Into::>::into(self).cmp(&Into::>::into(other)) - } else { - self_tag.cmp(&other_tag) - } - } -} - -// This implementation of `Hash` agrees with the derived `PartialEq`. It's required since -// `ed25519_dalek::PublicKey` doesn't implement `Hash`. 
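// The hash covers the algorithm tag followed by the key's serialized bytes, mirroring the ordering used by `Ord` above.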
-#[allow(clippy::derived_hash_with_manual_eq)] -impl Hash for PublicKey { - fn hash(&self, state: &mut H) { - self.tag().hash(state); - Into::>::into(self).hash(state); - } -} - -impl Tagged for PublicKey { - fn tag(&self) -> u8 { - match self { - PublicKey::System => SYSTEM_TAG, - PublicKey::Ed25519(_) => ED25519_TAG, - PublicKey::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -impl ToBytes for PublicKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - PublicKey::System => Self::SYSTEM_LENGTH, - PublicKey::Ed25519(_) => Self::ED25519_LENGTH, - PublicKey::Secp256k1(_) => Self::SECP256K1_LENGTH, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PublicKey::System => writer.push(SYSTEM_TAG), - PublicKey::Ed25519(public_key) => { - writer.push(ED25519_TAG); - writer.extend_from_slice(public_key.as_bytes()); - } - PublicKey::Secp256k1(public_key) => { - writer.push(SECP256K1_TAG); - writer.extend_from_slice(public_key.to_encoded_point(true).as_ref()); - } - } - Ok(()) - } -} - -impl FromBytes for PublicKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - SYSTEM_TAG => Ok((PublicKey::System, remainder)), - ED25519_TAG => { - let (raw_bytes, remainder): ([u8; Self::ED25519_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = Self::ed25519_from_bytes(raw_bytes) - .map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - SECP256K1_TAG => { - let (raw_bytes, remainder): ([u8; Self::SECP256K1_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = Self::secp256k1_from_bytes(raw_bytes) - .map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for PublicKey { - fn serialize(&self, serializer: S) -> Result { - detail::serialize(self, serializer) - } -} - -impl<'de> Deserialize<'de> for PublicKey { - fn deserialize>(deserializer: D) -> Result { - detail::deserialize(deserializer) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for PublicKey { - fn schema_name() -> String { - String::from("PublicKey") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some( - "Hex-encoded cryptographic public key, including the algorithm tag prefix.".to_string(), - ); - schema_object.metadata().examples = vec![ - json!({ - "name": "SystemPublicKey", - "description": "A pseudo public key, used for example when the system proposes an \ - immediate switch block after a network upgrade rather than a specific validator. \ - Its hex-encoded value is always '00', as is the corresponding pseudo signature's", - "value": "00" - }), - json!({ - "name": "Ed25519PublicKey", - "description": "An Ed25519 public key. Its hex-encoded value begins '01' and is \ - followed by 64 characters", - "value": "018a88e3dd7409f195fd52db2d3cba5d72ca6709bf1d94121bf3748801b40f6f5c" - }), - json!({ - "name": "Secp256k1PublicKey", - "description": "A secp256k1 public key. 
Its hex-encoded value begins '02' and is \ - followed by 66 characters", - "value": "0203408e9526316fd1f8def480dd45b2cc72ffd732771c9ceb5d92ffa4051e6ee084" - }), - ]; - schema_object.into() - } -} - -impl CLTyped for PublicKey { - fn cl_type() -> CLType { - CLType::PublicKey - } -} - -/// A signature of given data. -#[derive(Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum Signature { - /// System signature. Cannot be verified. - System, - /// Ed25519 signature. - #[cfg_attr(feature = "datasize", data_size(skip))] - Ed25519(Ed25519Signature), - /// Secp256k1 signature. - #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1Signature), -} - -impl Signature { - /// The length in bytes of a system signature, - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 signature, - pub const ED25519_LENGTH: usize = ED25519_SIGNATURE_LENGTH; - - /// The length in bytes of a secp256k1 signature - pub const SECP256K1_LENGTH: usize = SECP256K1_SIGNATURE_LENGTH; - - /// Constructs a new Ed25519 variant from a byte array. - pub fn ed25519(bytes: [u8; Self::ED25519_LENGTH]) -> Result { - let signature = Ed25519Signature::from_bytes(&bytes); - Ok(Signature::Ed25519(signature)) - } - - /// Constructs a new secp256k1 variant from a byte array. - pub fn secp256k1(bytes: [u8; Self::SECP256K1_LENGTH]) -> Result { - let signature = Secp256k1Signature::try_from(&bytes[..]).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct secp256k1 signature from {:?}", - &bytes[..] - )) - })?; - - Ok(Signature::Secp256k1(signature)) - } - - fn variant_name(&self) -> &str { - match self { - Signature::System => SYSTEM, - Signature::Ed25519(_) => ED25519, - Signature::Secp256k1(_) => SECP256K1, - } - } -} - -impl AsymmetricType<'_> for Signature { - fn system() -> Self { - Signature::System - } - - fn ed25519_from_bytes>(bytes: T) -> Result { - let signature = Ed25519Signature::try_from(bytes.as_ref()).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct Ed25519 signature from {:?}", - bytes.as_ref() - )) - })?; - Ok(Signature::Ed25519(signature)) - } - - fn secp256k1_from_bytes>(bytes: T) -> Result { - let signature = Secp256k1Signature::try_from(bytes.as_ref()).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct secp256k1 signature from {:?}", - bytes.as_ref() - )) - })?; - Ok(Signature::Secp256k1(signature)) - } -} - -impl Debug for Signature { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "Signature::{}({})", - self.variant_name(), - base16::encode_lower(&Into::>::into(*self)) - ) - } -} - -impl Display for Signature { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "Sig::{}({:10})", - self.variant_name(), - HexFmt(Into::>::into(*self)) - ) - } -} - -impl PartialOrd for Signature { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Signature { - fn cmp(&self, other: &Self) -> Ordering { - let self_tag = self.tag(); - let other_tag = other.tag(); - if self_tag == other_tag { - Into::>::into(*self).cmp(&Into::>::into(*other)) - } else { - self_tag.cmp(&other_tag) - } - } -} - -impl PartialEq for Signature { - fn eq(&self, other: &Self) -> bool { - self.tag() == other.tag() && Into::>::into(*self) == Into::>::into(*other) - } -} - -impl Eq for Signature {} - -impl Hash for Signature { - fn hash(&self, state: &mut H) { - self.tag().hash(state); - 
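// Hash the serialized signature bytes as well, so that equal signatures hash identically.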
Into::>::into(*self).hash(state); - } -} - -impl Tagged for Signature { - fn tag(&self) -> u8 { - match self { - Signature::System => SYSTEM_TAG, - Signature::Ed25519(_) => ED25519_TAG, - Signature::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -impl ToBytes for Signature { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - Signature::System => Self::SYSTEM_LENGTH, - Signature::Ed25519(_) => Self::ED25519_LENGTH, - Signature::Secp256k1(_) => Self::SECP256K1_LENGTH, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - Signature::System => { - writer.push(SYSTEM_TAG); - } - Signature::Ed25519(signature) => { - writer.push(ED25519_TAG); - writer.extend(signature.to_bytes()); - } - Signature::Secp256k1(signature) => { - writer.push(SECP256K1_TAG); - writer.extend_from_slice(&signature.to_bytes()); - } - } - Ok(()) - } -} - -impl FromBytes for Signature { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - SYSTEM_TAG => Ok((Signature::System, remainder)), - ED25519_TAG => { - let (raw_bytes, remainder): ([u8; Self::ED25519_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = - Self::ed25519(raw_bytes).map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - SECP256K1_TAG => { - let (raw_bytes, remainder): ([u8; Self::SECP256K1_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = - Self::secp256k1(raw_bytes).map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for Signature { - fn serialize(&self, serializer: S) -> Result { - detail::serialize(self, serializer) - } -} - -impl<'de> Deserialize<'de> for Signature { - fn deserialize>(deserializer: D) -> Result { - detail::deserialize(deserializer) - } -} - -impl From<&Signature> for Vec { - fn from(signature: &Signature) -> Self { - match signature { - Signature::System => Vec::new(), - Signature::Ed25519(signature) => signature.to_bytes().into(), - Signature::Secp256k1(signature) => (*signature.to_bytes()).into(), - } - } -} - -impl From for Vec { - fn from(signature: Signature) -> Self { - Vec::::from(&signature) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for Signature { - fn schema_name() -> String { - String::from("Signature") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some( - "Hex-encoded cryptographic signature, including the algorithm tag prefix.".to_string(), - ); - schema_object.into() - } -} - -/// Signs the given message using the given key pair. 
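/// Panics if given the system variant, or if the secret and public key types do not match.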
-pub fn sign>( - message: T, - secret_key: &SecretKey, - public_key: &PublicKey, -) -> Signature { - match (secret_key, public_key) { - (SecretKey::System, PublicKey::System) => { - panic!("cannot create signature with system keys",) - } - (SecretKey::Ed25519(secret_key), PublicKey::Ed25519(_public_key)) => { - let signature = secret_key.sign(message.as_ref()); - Signature::Ed25519(signature) - } - (SecretKey::Secp256k1(secret_key), PublicKey::Secp256k1(_public_key)) => { - let signer = secret_key; - let signature: Secp256k1Signature = signer - .try_sign(message.as_ref()) - .expect("should create signature"); - Signature::Secp256k1(signature) - } - _ => panic!("secret and public key types must match"), - } -} - -/// Verifies the signature of the given message against the given public key. -pub fn verify>( - message: T, - signature: &Signature, - public_key: &PublicKey, -) -> Result<(), Error> { - match (signature, public_key) { - (Signature::System, _) => Err(Error::AsymmetricKey(String::from( - "signatures based on the system key cannot be verified", - ))), - (Signature::Ed25519(signature), PublicKey::Ed25519(public_key)) => public_key - .verify_strict(message.as_ref(), signature) - .map_err(|_| Error::AsymmetricKey(String::from("failed to verify Ed25519 signature"))), - (Signature::Secp256k1(signature), PublicKey::Secp256k1(public_key)) => { - let verifier: &Secp256k1PublicKey = public_key; - verifier - .verify(message.as_ref(), signature) - .map_err(|error| { - Error::AsymmetricKey(format!("failed to verify secp256k1 signature: {}", error)) - }) - } - _ => Err(Error::AsymmetricKey(format!( - "type mismatch between {} and {}", - signature, public_key - ))), - } -} - -/// Generates an Ed25519 keypair using the operating system's cryptographically secure random number -/// generator. -#[cfg(any(feature = "std", test))] -pub fn generate_ed25519_keypair() -> (SecretKey, PublicKey) { - let secret_key = SecretKey::generate_ed25519().unwrap(); - let public_key = PublicKey::from(&secret_key); - (secret_key, public_key) -} - -mod detail { - use alloc::{string::String, vec::Vec}; - - use serde::{de::Error as _deError, Deserialize, Deserializer, Serialize, Serializer}; - - use super::{PublicKey, Signature}; - use crate::AsymmetricType; - - /// Used to serialize and deserialize asymmetric key types where the (de)serializer is not a - /// human-readable type. - /// - /// The wrapped contents are the result of calling `t_as_ref()` on the type. 
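/// Human-readable serializers (e.g. JSON) do not use this type; they round-trip through the tag-prefixed hex form produced by `to_hex`. Binary serializers such as bincode go through these raw bytes.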
- #[derive(Serialize, Deserialize)] - pub(super) enum AsymmetricTypeAsBytes { - System, - Ed25519(Vec), - Secp256k1(Vec), - } - - impl From<&PublicKey> for AsymmetricTypeAsBytes { - fn from(public_key: &PublicKey) -> Self { - match public_key { - PublicKey::System => AsymmetricTypeAsBytes::System, - key @ PublicKey::Ed25519(_) => AsymmetricTypeAsBytes::Ed25519(key.into()), - key @ PublicKey::Secp256k1(_) => AsymmetricTypeAsBytes::Secp256k1(key.into()), - } - } - } - - impl From<&Signature> for AsymmetricTypeAsBytes { - fn from(signature: &Signature) -> Self { - match signature { - Signature::System => AsymmetricTypeAsBytes::System, - key @ Signature::Ed25519(_) => AsymmetricTypeAsBytes::Ed25519(key.into()), - key @ Signature::Secp256k1(_) => AsymmetricTypeAsBytes::Secp256k1(key.into()), - } - } - } - - pub(super) fn serialize<'a, T, S>(value: &'a T, serializer: S) -> Result - where - T: AsymmetricType<'a>, - Vec: From<&'a T>, - S: Serializer, - AsymmetricTypeAsBytes: From<&'a T>, - { - if serializer.is_human_readable() { - return value.to_hex().serialize(serializer); - } - - AsymmetricTypeAsBytes::from(value).serialize(serializer) - } - - pub(super) fn deserialize<'a, 'de, T, D>(deserializer: D) -> Result - where - T: AsymmetricType<'a>, - Vec: From<&'a T>, - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let value = T::from_hex(hex_string.as_bytes()).map_err(D::Error::custom)?; - return Ok(value); - } - - let as_bytes = AsymmetricTypeAsBytes::deserialize(deserializer)?; - match as_bytes { - AsymmetricTypeAsBytes::System => Ok(T::system()), - AsymmetricTypeAsBytes::Ed25519(raw_bytes) => { - T::ed25519_from_bytes(raw_bytes).map_err(D::Error::custom) - } - AsymmetricTypeAsBytes::Secp256k1(raw_bytes) => { - T::secp256k1_from_bytes(raw_bytes).map_err(D::Error::custom) - } - } - } -} diff --git a/casper_types/src/crypto/asymmetric_key/gens.rs b/casper_types/src/crypto/asymmetric_key/gens.rs deleted file mode 100644 index 2316133a..00000000 --- a/casper_types/src/crypto/asymmetric_key/gens.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Generators for asymmetric key types - -use core::convert::TryInto; - -use proptest::{ - collection, - prelude::{Arbitrary, Just, Strategy}, - prop_oneof, -}; - -use crate::{crypto::SecretKey, PublicKey}; - -/// Creates an arbitrary [`PublicKey`] -pub fn public_key_arb() -> impl Strategy { - prop_oneof![ - Just(PublicKey::System), - collection::vec(::arbitrary(), SecretKey::ED25519_LENGTH).prop_map(|bytes| { - let byte_array: [u8; SecretKey::ED25519_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::ed25519_from_bytes(byte_array).unwrap(); - PublicKey::from(&secret_key) - }), - collection::vec(::arbitrary(), SecretKey::SECP256K1_LENGTH).prop_map(|bytes| { - let bytes_array: [u8; SecretKey::SECP256K1_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::secp256k1_from_bytes(bytes_array).unwrap(); - PublicKey::from(&secret_key) - }) - ] -} - -/// Returns a strategy for creating random [`PublicKey`] instances but NOT system variant. 
-pub fn public_key_arb_no_system() -> impl Strategy { - prop_oneof![ - collection::vec(::arbitrary(), SecretKey::ED25519_LENGTH).prop_map(|bytes| { - let byte_array: [u8; SecretKey::ED25519_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::ed25519_from_bytes(byte_array).unwrap(); - PublicKey::from(&secret_key) - }), - collection::vec(::arbitrary(), SecretKey::SECP256K1_LENGTH).prop_map(|bytes| { - let bytes_array: [u8; SecretKey::SECP256K1_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::secp256k1_from_bytes(bytes_array).unwrap(); - PublicKey::from(&secret_key) - }) - ] -} diff --git a/casper_types/src/crypto/asymmetric_key/tests.rs b/casper_types/src/crypto/asymmetric_key/tests.rs deleted file mode 100644 index be7132da..00000000 --- a/casper_types/src/crypto/asymmetric_key/tests.rs +++ /dev/null @@ -1,862 +0,0 @@ -use std::{ - cmp::Ordering, - collections::hash_map::DefaultHasher, - hash::{Hash, Hasher}, - iter, -}; - -use rand::RngCore; - -use k256::elliptic_curve::sec1::ToEncodedPoint; -use openssl::pkey::{PKey, Private, Public}; - -use super::*; -use crate::{ - bytesrepr, checksummed_hex, crypto::SecretKey, testing::TestRng, AsymmetricType, PublicKey, - Tagged, -}; - -#[test] -fn can_construct_ed25519_keypair_from_zeroes() { - let bytes = [0; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -#[should_panic] -fn cannot_construct_secp256k1_keypair_from_zeroes() { - let bytes = [0; SecretKey::SECP256K1_LENGTH]; - let secret_key = SecretKey::secp256k1_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -fn can_construct_ed25519_keypair_from_ones() { - let bytes = [1; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -fn can_construct_secp256k1_keypair_from_ones() { - let bytes = [1; SecretKey::SECP256K1_LENGTH]; - let secret_key = SecretKey::secp256k1_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -type OpenSSLSecretKey = PKey; -type OpenSSLPublicKey = PKey; - -// `SecretKey` does not implement `PartialEq`, so just compare derived `PublicKey`s. -fn assert_secret_keys_equal(lhs: &SecretKey, rhs: &SecretKey) { - assert_eq!(PublicKey::from(lhs), PublicKey::from(rhs)); -} - -fn secret_key_der_roundtrip(secret_key: SecretKey) { - let der_encoded = secret_key.to_der().unwrap(); - let decoded = SecretKey::from_der(&der_encoded).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. - SecretKey::from_der(&der_encoded[1..]).unwrap_err(); -} - -fn secret_key_pem_roundtrip(secret_key: SecretKey) { - let pem_encoded = secret_key.to_pem().unwrap(); - let decoded = SecretKey::from_pem(pem_encoded.as_bytes()).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); - - // Check PEM-encoded can be decoded by openssl. - let _ = OpenSSLSecretKey::private_key_from_pem(pem_encoded.as_bytes()).unwrap(); - - // Ensure malformed encoded version fails to decode. 
- SecretKey::from_pem(&pem_encoded[1..]).unwrap_err(); -} - -fn known_secret_key_to_pem(expected_key: &SecretKey, known_key_pem: &str, expected_tag: u8) { - let decoded = SecretKey::from_pem(known_key_pem.as_bytes()).unwrap(); - assert_secret_keys_equal(expected_key, &decoded); - assert_eq!(expected_tag, decoded.tag()); -} - -fn secret_key_file_roundtrip(secret_key: SecretKey) { - let tempdir = tempfile::tempdir().unwrap(); - let path = tempdir.path().join("test_secret_key.pem"); - - secret_key.to_file(&path).unwrap(); - let decoded = SecretKey::from_file(&path).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); -} - -fn public_key_serialization_roundtrip(public_key: PublicKey) { - // Try to/from bincode. - let serialized = bincode::serialize(&public_key).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(public_key, deserialized); - assert_eq!(public_key.tag(), deserialized.tag()); - - // Try to/from JSON. - let serialized = serde_json::to_vec_pretty(&public_key).unwrap(); - let deserialized = serde_json::from_slice(&serialized).unwrap(); - assert_eq!(public_key, deserialized); - assert_eq!(public_key.tag(), deserialized.tag()); - - // Using bytesrepr. - bytesrepr::test_serialization_roundtrip(&public_key); -} - -fn public_key_der_roundtrip(public_key: PublicKey) { - let der_encoded = public_key.to_der().unwrap(); - let decoded = PublicKey::from_der(&der_encoded).unwrap(); - assert_eq!(public_key, decoded); - - // Check DER-encoded can be decoded by openssl. - let _ = OpenSSLPublicKey::public_key_from_der(&der_encoded).unwrap(); - - // Ensure malformed encoded version fails to decode. - PublicKey::from_der(&der_encoded[1..]).unwrap_err(); -} - -fn public_key_pem_roundtrip(public_key: PublicKey) { - let pem_encoded = public_key.to_pem().unwrap(); - let decoded = PublicKey::from_pem(pem_encoded.as_bytes()).unwrap(); - assert_eq!(public_key, decoded); - assert_eq!(public_key.tag(), decoded.tag()); - - // Check PEM-encoded can be decoded by openssl. - let _ = OpenSSLPublicKey::public_key_from_pem(pem_encoded.as_bytes()).unwrap(); - - // Ensure malformed encoded version fails to decode. - PublicKey::from_pem(&pem_encoded[1..]).unwrap_err(); -} - -fn known_public_key_to_pem(known_key_hex: &str, known_key_pem: &str) { - let key_bytes = checksummed_hex::decode(known_key_hex).unwrap(); - let decoded = PublicKey::from_pem(known_key_pem.as_bytes()).unwrap(); - assert_eq!(key_bytes, Into::>::into(decoded)); -} - -fn public_key_file_roundtrip(public_key: PublicKey) { - let tempdir = tempfile::tempdir().unwrap(); - let path = tempdir.path().join("test_public_key.pem"); - - public_key.to_file(&path).unwrap(); - let decoded = PublicKey::from_file(&path).unwrap(); - assert_eq!(public_key, decoded); -} - -fn public_key_hex_roundtrip(public_key: PublicKey) { - let hex_encoded = public_key.to_hex(); - let decoded = PublicKey::from_hex(&hex_encoded).unwrap(); - assert_eq!(public_key, decoded); - assert_eq!(public_key.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. - PublicKey::from_hex(&hex_encoded[..1]).unwrap_err(); - PublicKey::from_hex(&hex_encoded[1..]).unwrap_err(); -} - -fn signature_serialization_roundtrip(signature: Signature) { - // Try to/from bincode. 
- let serialized = bincode::serialize(&signature).unwrap(); - let deserialized: Signature = bincode::deserialize(&serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()); - - // Try to/from JSON. - let serialized = serde_json::to_vec_pretty(&signature).unwrap(); - let deserialized = serde_json::from_slice(&serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()); - - // Try to/from using bytesrepr. - let serialized = bytesrepr::serialize(signature).unwrap(); - let deserialized = bytesrepr::deserialize(serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()) -} - -fn signature_hex_roundtrip(signature: Signature) { - let hex_encoded = signature.to_hex(); - let decoded = Signature::from_hex(hex_encoded.as_bytes()).unwrap(); - assert_eq!(signature, decoded); - assert_eq!(signature.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. - Signature::from_hex(&hex_encoded[..1]).unwrap_err(); - Signature::from_hex(&hex_encoded[1..]).unwrap_err(); -} - -fn hash(data: &T) -> u64 { - let mut hasher = DefaultHasher::new(); - data.hash(&mut hasher); - hasher.finish() -} - -fn check_ord_and_hash(low: T, high: T) { - #[allow(clippy::redundant_clone)] - let low_copy = low.clone(); - - assert_eq!(hash(&low), hash(&low_copy)); - assert_ne!(hash(&low), hash(&high)); - - assert_eq!(Ordering::Less, low.cmp(&high)); - assert_eq!(Some(Ordering::Less), low.partial_cmp(&high)); - - assert_eq!(Ordering::Greater, high.cmp(&low)); - assert_eq!(Some(Ordering::Greater), high.partial_cmp(&low)); - - assert_eq!(Ordering::Equal, low.cmp(&low_copy)); - assert_eq!(Some(Ordering::Equal), low.partial_cmp(&low_copy)); -} - -mod system { - use std::path::Path; - - use super::{sign, verify}; - use crate::crypto::{AsymmetricType, PublicKey, SecretKey, Signature}; - - #[test] - fn secret_key_to_der_should_error() { - assert!(SecretKey::system().to_der().is_err()); - } - - #[test] - fn secret_key_to_pem_should_error() { - assert!(SecretKey::system().to_pem().is_err()); - } - - #[test] - fn secret_key_to_file_should_error() { - assert!(SecretKey::system().to_file(Path::new("/dev/null")).is_err()); - } - - #[test] - fn public_key_serialization_roundtrip() { - super::public_key_serialization_roundtrip(PublicKey::system()); - } - - #[test] - fn public_key_to_der_should_error() { - assert!(PublicKey::system().to_der().is_err()); - } - - #[test] - fn public_key_to_pem_should_error() { - assert!(PublicKey::system().to_pem().is_err()); - } - - #[test] - fn public_key_to_file_should_error() { - assert!(PublicKey::system().to_file(Path::new("/dev/null")).is_err()); - } - - #[test] - fn public_key_to_and_from_hex() { - super::public_key_hex_roundtrip(PublicKey::system()); - } - - #[test] - #[should_panic] - fn sign_should_panic() { - sign([], &SecretKey::system(), &PublicKey::system()); - } - - #[test] - fn signature_to_and_from_hex() { - super::signature_hex_roundtrip(Signature::system()); - } - - #[test] - fn public_key_to_account_hash() { - assert_ne!( - PublicKey::system().to_account_hash().as_ref(), - Into::>::into(PublicKey::system()) - ); - } - - #[test] - fn verify_should_error() { - assert!(verify([], &Signature::system(), &PublicKey::system()).is_err()); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - crate::bytesrepr::test_serialization_roundtrip(&Signature::system()); - } -} - -mod ed25519 { - use rand::Rng; - - use super::*; - use 
crate::ED25519_TAG; - - const SECRET_KEY_LENGTH: usize = SecretKey::ED25519_LENGTH; - const PUBLIC_KEY_LENGTH: usize = PublicKey::ED25519_LENGTH; - const SIGNATURE_LENGTH: usize = Signature::ED25519_LENGTH; - - #[test] - fn secret_key_from_bytes() { - // Secret key should be `SecretKey::ED25519_LENGTH` bytes. - let bytes = [0; SECRET_KEY_LENGTH + 1]; - assert!(SecretKey::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(SecretKey::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(SecretKey::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn secret_key_to_and_from_der() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let der_encoded = secret_key.to_der().unwrap(); - secret_key_der_roundtrip(secret_key); - - // Check DER-encoded can be decoded by openssl. - let _ = OpenSSLSecretKey::private_key_from_der(&der_encoded).unwrap(); - } - - #[test] - fn secret_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - secret_key_pem_roundtrip(secret_key); - } - - #[test] - fn known_secret_key_to_pem() { - // Example values taken from https://tools.ietf.org/html/rfc8410#section-10.3 - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PRIVATE KEY----- -MC4CAQAwBQYDK2VwBCIEINTuctv5E1hK1bbY8fdp+K06/nwoy/HU++CXqI9EdVhC ------END PRIVATE KEY-----"#; - let key_bytes = - base16::decode("d4ee72dbf913584ad5b6d8f1f769f8ad3afe7c28cbf1d4fbe097a88f44755842") - .unwrap(); - let expected_key = SecretKey::ed25519_from_bytes(key_bytes).unwrap(); - super::known_secret_key_to_pem(&expected_key, KNOWN_KEY_PEM, ED25519_TAG); - } - - #[test] - fn secret_key_to_and_from_file() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - secret_key_file_roundtrip(secret_key); - } - - #[test] - fn public_key_serialization_roundtrip() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - super::public_key_serialization_roundtrip(public_key); - } - - #[test] - fn public_key_from_bytes() { - // Public key should be `PublicKey::ED25519_LENGTH` bytes. Create vec with an extra - // byte. - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - let bytes: Vec = iter::once(rng.gen()) - .chain(Into::>::into(public_key)) - .collect::>(); - - assert!(PublicKey::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(PublicKey::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. 
- assert!(PublicKey::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn public_key_to_and_from_der() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_der_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_pem_roundtrip(public_key); - } - - #[test] - fn known_public_key_to_pem() { - // Example values taken from https://tools.ietf.org/html/rfc8410#section-10.1 - const KNOWN_KEY_HEX: &str = - "19bf44096984cdfe8541bac167dc3b96c85086aa30b6b6cb0c5c38ad703166e1"; - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PUBLIC KEY----- -MCowBQYDK2VwAyEAGb9ECWmEzf6FQbrBZ9w7lshQhqowtrbLDFw4rXAxZuE= ------END PUBLIC KEY-----"#; - super::known_public_key_to_pem(KNOWN_KEY_HEX, KNOWN_KEY_PEM); - } - - #[test] - fn public_key_to_and_from_file() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_file_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_hex_roundtrip(public_key); - } - - #[test] - fn signature_serialization_roundtrip() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - super::signature_serialization_roundtrip(signature); - } - - #[test] - fn signature_from_bytes() { - // Signature should be `Signature::ED25519_LENGTH` bytes. - let bytes = [2; SIGNATURE_LENGTH + 1]; - assert!(Signature::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(Signature::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. 
- assert!(Signature::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn signature_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - signature_hex_roundtrip(signature); - } - - #[test] - fn public_key_traits() { - let public_key_low = PublicKey::ed25519_from_bytes([1; PUBLIC_KEY_LENGTH]).unwrap(); - let public_key_high = PublicKey::ed25519_from_bytes([3; PUBLIC_KEY_LENGTH]).unwrap(); - check_ord_and_hash(public_key_low, public_key_high) - } - - #[test] - fn public_key_to_account_hash() { - let public_key_high = PublicKey::ed25519_from_bytes([255; PUBLIC_KEY_LENGTH]).unwrap(); - assert_ne!( - public_key_high.to_account_hash().as_ref(), - Into::>::into(public_key_high) - ); - } - - #[test] - fn signature_traits() { - let signature_low = Signature::ed25519([1; SIGNATURE_LENGTH]).unwrap(); - let signature_high = Signature::ed25519([3; SIGNATURE_LENGTH]).unwrap(); - check_ord_and_hash(signature_low, signature_high) - } - - #[test] - fn sign_and_verify() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - - let public_key = PublicKey::from(&secret_key); - let other_public_key = PublicKey::random_ed25519(&mut rng); - let wrong_type_public_key = PublicKey::random_secp256k1(&mut rng); - - let message = b"message"; - let signature = sign(message, &secret_key, &public_key); - - assert!(verify(message, &signature, &public_key).is_ok()); - assert!(verify(message, &signature, &other_public_key).is_err()); - assert!(verify(message, &signature, &wrong_type_public_key).is_err()); - assert!(verify(&message[1..], &signature, &public_key).is_err()); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - let mut rng = TestRng::new(); - let ed25519_secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&ed25519_secret_key); - let data = b"data"; - let signature = sign(data, &ed25519_secret_key, &public_key); - bytesrepr::test_serialization_roundtrip(&signature); - } - - #[test] - fn validate_known_signature() { - // In the event that this test fails, we need to consider pinning the version of the - // `ed25519-dalek` crate to maintain backwards compatibility with existing data on the - // Casper network. 
- - // Values taken from: - // https://github.com/dalek-cryptography/ed25519-dalek/blob/925eb9ea56192053c9eb93b9d30d1b9419eee128/TESTVECTORS#L62 - let secret_key_hex = "bf5ba5d6a49dd5ef7b4d5d7d3e4ecc505c01f6ccee4c54b5ef7b40af6a454140"; - let public_key_hex = "1be034f813017b900d8990af45fad5b5214b573bd303ef7a75ef4b8c5c5b9842"; - let message_hex = - "16152c2e037b1c0d3219ced8e0674aee6b57834b55106c5344625322da638ecea2fc9a424a05ee9512\ - d48fcf75dd8bd4691b3c10c28ec98ee1afa5b863d1c36795ed18105db3a9aabd9d2b4c1747adbaf1a56\ - ffcc0c533c1c0faef331cdb79d961fa39f880a1b8b1164741822efb15a7259a465bef212855751fab66\ - a897bfa211abe0ea2f2e1cd8a11d80e142cde1263eec267a3138ae1fcf4099db0ab53d64f336f4bcd7a\ - 363f6db112c0a2453051a0006f813aaf4ae948a2090619374fa58052409c28ef76225687df3cb2d1b0b\ - fb43b09f47f1232f790e6d8dea759e57942099f4c4bd3390f28afc2098244961465c643fc8b29766af2\ - bcbc5440b86e83608cfc937be98bb4827fd5e6b689adc2e26513db531076a6564396255a09975b7034d\ - ac06461b255642e3a7ed75fa9fc265011f5f6250382a84ac268d63ba64"; - let signature_hex = - "279cace6fdaf3945e3837df474b28646143747632bede93e7a66f5ca291d2c24978512ca0cb8827c8c\ - 322685bd605503a5ec94dbae61bbdcae1e49650602bc07"; - - let secret_key_bytes = base16::decode(secret_key_hex).unwrap(); - let public_key_bytes = base16::decode(public_key_hex).unwrap(); - let message_bytes = base16::decode(message_hex).unwrap(); - let signature_bytes = base16::decode(signature_hex).unwrap(); - - let secret_key = SecretKey::ed25519_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::ed25519_from_bytes(public_key_bytes).unwrap(); - assert_eq!(public_key, PublicKey::from(&secret_key)); - - let signature = Signature::ed25519_from_bytes(signature_bytes).unwrap(); - assert_eq!(sign(&message_bytes, &secret_key, &public_key), signature); - assert!(verify(&message_bytes, &signature, &public_key).is_ok()); - } -} - -mod secp256k1 { - use rand::Rng; - - use super::*; - use crate::SECP256K1_TAG; - - const SECRET_KEY_LENGTH: usize = SecretKey::SECP256K1_LENGTH; - const SIGNATURE_LENGTH: usize = Signature::SECP256K1_LENGTH; - - #[test] - fn secret_key_from_bytes() { - // Secret key should be `SecretKey::SECP256K1_LENGTH` bytes. - // The k256 library will ensure that a byte stream of a length not equal to - // `SECP256K1_LENGTH` will fail due to an assertion internal to the library. - // We can check that invalid byte streams e.g [0;32] does not generate a valid key. - let bytes = [0; SECRET_KEY_LENGTH]; - assert!(SecretKey::secp256k1_from_bytes(&bytes[..]).is_err()); - - // Check that a valid byte stream produces a valid key - let bytes = [1; SECRET_KEY_LENGTH]; - assert!(SecretKey::secp256k1_from_bytes(&bytes[..]).is_ok()); - } - - #[test] - fn secret_key_to_and_from_der() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_der_roundtrip(secret_key); - } - - #[test] - fn secret_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_pem_roundtrip(secret_key); - } - - #[test] - fn known_secret_key_to_pem() { - // Example values taken from Python client. 
- const KNOWN_KEY_PEM: &str = r#"-----BEGIN EC PRIVATE KEY----- -MHQCAQEEIL3fqaMKAfXSK1D2PnVVbZlZ7jTv133nukq4+95s6kmcoAcGBSuBBAAK -oUQDQgAEQI6VJjFv0fje9IDdRbLMcv/XMnccnOtdkv+kBR5u4ISEAkuc2TFWQHX0 -Yj9oTB9fx9+vvQdxJOhMtu46kGo0Uw== ------END EC PRIVATE KEY-----"#; - let key_bytes = - base16::decode("bddfa9a30a01f5d22b50f63e75556d9959ee34efd77de7ba4ab8fbde6cea499c") - .unwrap(); - let expected_key = SecretKey::secp256k1_from_bytes(key_bytes).unwrap(); - super::known_secret_key_to_pem(&expected_key, KNOWN_KEY_PEM, SECP256K1_TAG); - } - - #[test] - fn secret_key_to_and_from_file() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_file_roundtrip(secret_key); - } - - #[test] - fn public_key_serialization_roundtrip() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - super::public_key_serialization_roundtrip(public_key); - } - - #[test] - fn public_key_from_bytes() { - // Public key should be `PublicKey::SECP256K1_LENGTH` bytes. Create vec with an extra - // byte. - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - let bytes: Vec = iter::once(rng.gen()) - .chain(Into::>::into(public_key)) - .collect::>(); - - assert!(PublicKey::secp256k1_from_bytes(&bytes[..]).is_err()); - assert!(PublicKey::secp256k1_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(PublicKey::secp256k1_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn public_key_to_and_from_der() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_der_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_pem_roundtrip(public_key); - } - - #[test] - fn known_public_key_to_pem() { - // Example values taken from Python client. - const KNOWN_KEY_HEX: &str = - "03408e9526316fd1f8def480dd45b2cc72ffd732771c9ceb5d92ffa4051e6ee084"; - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PUBLIC KEY----- -MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEQI6VJjFv0fje9IDdRbLMcv/XMnccnOtd -kv+kBR5u4ISEAkuc2TFWQHX0Yj9oTB9fx9+vvQdxJOhMtu46kGo0Uw== ------END PUBLIC KEY-----"#; - super::known_public_key_to_pem(KNOWN_KEY_HEX, KNOWN_KEY_PEM); - } - - #[test] - fn public_key_to_and_from_file() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_file_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_hex_roundtrip(public_key); - } - - #[test] - fn signature_serialization_roundtrip() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - super::signature_serialization_roundtrip(signature); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - bytesrepr::test_serialization_roundtrip(&signature); - } - - #[test] - fn signature_from_bytes() { - // Signature should be `Signature::SECP256K1_LENGTH` bytes. 
- let bytes = [2; SIGNATURE_LENGTH + 1]; - assert!(Signature::secp256k1_from_bytes(&bytes[..]).is_err()); - assert!(Signature::secp256k1_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(Signature::secp256k1_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn signature_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - signature_hex_roundtrip(signature); - } - - #[test] - fn public_key_traits() { - let mut rng = TestRng::new(); - let public_key1 = PublicKey::random_secp256k1(&mut rng); - let public_key2 = PublicKey::random_secp256k1(&mut rng); - if Into::>::into(public_key1.clone()) < Into::>::into(public_key2.clone()) { - check_ord_and_hash(public_key1, public_key2) - } else { - check_ord_and_hash(public_key2, public_key1) - } - } - - #[test] - fn public_key_to_account_hash() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - assert_ne!( - public_key.to_account_hash().as_ref(), - Into::>::into(public_key) - ); - } - - #[test] - fn signature_traits() { - let signature_low = Signature::secp256k1([1; SIGNATURE_LENGTH]).unwrap(); - let signature_high = Signature::secp256k1([3; SIGNATURE_LENGTH]).unwrap(); - check_ord_and_hash(signature_low, signature_high) - } - - #[test] - fn validate_known_signature() { - // In the event that this test fails, we need to consider pinning the version of the - // `k256` crate to maintain backwards compatibility with existing data on the Casper - // network. - let secret_key_hex = "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42"; - let public_key_hex = "028e24fd9654f12c793d3d376c15f7abe53e0fbd537884a3a98d10d2dc6d513b4e"; - let message_hex = "616263"; - let signature_hex = "8016162860f0795154643d15c5ab5bb840d8c695d6de027421755579ea7f2a4629b7e0c88fc3428669a6a89496f426181b73f10c6c8a05ac8f49d6cb5032eb89"; - - let secret_key_bytes = base16::decode(secret_key_hex).unwrap(); - let public_key_bytes = base16::decode(public_key_hex).unwrap(); - let message_bytes = base16::decode(message_hex).unwrap(); - let signature_bytes = base16::decode(signature_hex).unwrap(); - - let secret_key = SecretKey::secp256k1_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::secp256k1_from_bytes(public_key_bytes).unwrap(); - assert_eq!(public_key, PublicKey::from(&secret_key)); - - let signature = Signature::secp256k1_from_bytes(signature_bytes).unwrap(); - assert_eq!(sign(&message_bytes, &secret_key, &public_key), signature); - assert!(verify(&message_bytes, &signature, &public_key).is_ok()); - } -} - -#[test] -fn public_key_traits() { - let system_key = PublicKey::system(); - let mut rng = TestRng::new(); - let ed25519_public_key = PublicKey::random_ed25519(&mut rng); - let secp256k1_public_key = PublicKey::random_secp256k1(&mut rng); - check_ord_and_hash(ed25519_public_key.clone(), secp256k1_public_key.clone()); - check_ord_and_hash(system_key.clone(), ed25519_public_key); - check_ord_and_hash(system_key, secp256k1_public_key); -} - -#[test] -fn signature_traits() { - let system_sig = Signature::system(); - let ed25519_sig = Signature::ed25519([3; Signature::ED25519_LENGTH]).unwrap(); - let secp256k1_sig = Signature::secp256k1([1; Signature::SECP256K1_LENGTH]).unwrap(); - check_ord_and_hash(ed25519_sig, secp256k1_sig); - check_ord_and_hash(system_sig, 
ed25519_sig); - check_ord_and_hash(system_sig, secp256k1_sig); -} - -#[test] -fn sign_and_verify() { - let mut rng = TestRng::new(); - let ed25519_secret_key = SecretKey::random_ed25519(&mut rng); - let secp256k1_secret_key = SecretKey::random_secp256k1(&mut rng); - - let ed25519_public_key = PublicKey::from(&ed25519_secret_key); - let secp256k1_public_key = PublicKey::from(&secp256k1_secret_key); - - let other_ed25519_public_key = PublicKey::random_ed25519(&mut rng); - let other_secp256k1_public_key = PublicKey::random_secp256k1(&mut rng); - - let message = b"message"; - let ed25519_signature = sign(message, &ed25519_secret_key, &ed25519_public_key); - let secp256k1_signature = sign(message, &secp256k1_secret_key, &secp256k1_public_key); - - assert!(verify(message, &ed25519_signature, &ed25519_public_key).is_ok()); - assert!(verify(message, &secp256k1_signature, &secp256k1_public_key).is_ok()); - - assert!(verify(message, &ed25519_signature, &other_ed25519_public_key).is_err()); - assert!(verify(message, &secp256k1_signature, &other_secp256k1_public_key).is_err()); - - assert!(verify(message, &ed25519_signature, &secp256k1_public_key).is_err()); - assert!(verify(message, &secp256k1_signature, &ed25519_public_key).is_err()); - - assert!(verify(&message[1..], &ed25519_signature, &ed25519_public_key).is_err()); - assert!(verify(&message[1..], &secp256k1_signature, &secp256k1_public_key).is_err()); -} - -#[test] -fn should_construct_secp256k1_from_uncompressed_bytes() { - let mut rng = TestRng::new(); - - let mut secret_key_bytes = [0u8; SecretKey::SECP256K1_LENGTH]; - rng.fill_bytes(&mut secret_key_bytes[..]); - - // Construct a secp256k1 secret key and use that to construct a public key. - let secp256k1_secret_key = k256::SecretKey::from_slice(&secret_key_bytes).unwrap(); - let secp256k1_public_key = secp256k1_secret_key.public_key(); - - // Construct a CL secret key and public key from that (which will be a compressed key). - let secret_key = SecretKey::secp256k1_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::from(&secret_key); - assert_eq!( - Into::>::into(public_key.clone()).len(), - PublicKey::SECP256K1_LENGTH - ); - assert_ne!( - secp256k1_public_key - .to_encoded_point(false) - .as_bytes() - .len(), - PublicKey::SECP256K1_LENGTH - ); - - // Construct a CL public key from uncompressed public key bytes and ensure it's compressed. - let from_uncompressed_bytes = - PublicKey::secp256k1_from_bytes(secp256k1_public_key.to_encoded_point(false).as_bytes()) - .unwrap(); - assert_eq!(public_key, from_uncompressed_bytes); - - // Construct a CL public key from the uncompressed one's hex representation and ensure it's - // compressed. 
- let uncompressed_hex = { - let tag_bytes = vec![0x02u8]; - base16::encode_lower(&tag_bytes) - + &base16::encode_lower(&secp256k1_public_key.to_encoded_point(false).as_bytes()) - }; - - format!( - "02{}", - base16::encode_lower(secp256k1_public_key.to_encoded_point(false).as_bytes()) - .to_lowercase() - ); - let from_uncompressed_hex = PublicKey::from_hex(uncompressed_hex).unwrap(); - assert_eq!(public_key, from_uncompressed_hex); -} - -#[test] -fn generate_ed25519_should_generate_an_ed25519_key() { - let secret_key = SecretKey::generate_ed25519().unwrap(); - assert!(matches!(secret_key, SecretKey::Ed25519(_))) -} - -#[test] -fn generate_secp256k1_should_generate_an_secp256k1_key() { - let secret_key = SecretKey::generate_secp256k1().unwrap(); - assert!(matches!(secret_key, SecretKey::Secp256k1(_))) -} diff --git a/casper_types/src/crypto/error.rs b/casper_types/src/crypto/error.rs deleted file mode 100644 index 6750e61f..00000000 --- a/casper_types/src/crypto/error.rs +++ /dev/null @@ -1,111 +0,0 @@ -use alloc::string::String; -use core::fmt::Debug; -#[cfg(not(any(feature = "std", test)))] -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use ed25519_dalek::ed25519::Error as SignatureError; -#[cfg(any(feature = "std", test))] -use pem::PemError; -#[cfg(any(feature = "std", test))] -use thiserror::Error; - -#[cfg(any(feature = "std", test))] -use crate::file_utils::{ReadFileError, WriteFileError}; - -/// Cryptographic errors. -#[derive(Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(any(feature = "std", test), derive(Error))] -#[non_exhaustive] -pub enum Error { - /// Error resulting from creating or using asymmetric key types. - #[cfg_attr(any(feature = "std", test), error("asymmetric key error: {0}"))] - AsymmetricKey(String), - - /// Error resulting when decoding a type from a hex-encoded representation. - #[cfg_attr(feature = "datasize", data_size(skip))] - #[cfg_attr(any(feature = "std", test), error("parsing from hex: {0}"))] - FromHex(base16::DecodeError), - - /// Error resulting when decoding a type from a base64 representation. - #[cfg_attr(feature = "datasize", data_size(skip))] - #[cfg_attr(any(feature = "std", test), error("decoding error: {0}"))] - FromBase64(base64::DecodeError), - - /// Signature error. - #[cfg_attr(any(feature = "std", test), error("error in signature"))] - SignatureError, - - /// Error trying to manipulate the system key. - #[cfg_attr( - any(feature = "std", test), - error("invalid operation on system key: {0}") - )] - System(String), -} - -#[cfg(not(any(feature = "std", test)))] -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - Debug::fmt(self, formatter) - } -} - -impl From for Error { - fn from(error: base16::DecodeError) -> Self { - Error::FromHex(error) - } -} - -impl From for Error { - fn from(_error: SignatureError) -> Self { - Error::SignatureError - } -} - -/// Cryptographic errors extended with some additional variants. -#[cfg(any(feature = "std", test))] -#[derive(Debug, Error)] -#[non_exhaustive] -pub enum ErrorExt { - /// A basic crypto error. - #[error("crypto error: {0:?}")] - CryptoError(#[from] Error), - - /// Error trying to read a secret key. - #[error("secret key load failed: {0}")] - SecretKeyLoad(ReadFileError), - - /// Error trying to read a public key. - #[error("public key load failed: {0}")] - PublicKeyLoad(ReadFileError), - - /// Error trying to write a secret key. 
- #[error("secret key save failed: {0}")] - SecretKeySave(WriteFileError), - - /// Error trying to write a public key. - #[error("public key save failed: {0}")] - PublicKeySave(WriteFileError), - - /// Pem format error. - #[error("pem error: {0}")] - FromPem(String), - - /// DER format error. - #[error("der error: {0}")] - FromDer(#[from] derp::Error), - - /// Error in getting random bytes from the system's preferred random number source. - #[error("failed to get random bytes: {0}")] - GetRandomBytes(#[from] getrandom::Error), -} - -#[cfg(any(feature = "std", test))] -impl From for ErrorExt { - fn from(error: PemError) -> Self { - ErrorExt::FromPem(error.to_string()) - } -} diff --git a/casper_types/src/deploy_info.rs b/casper_types/src/deploy_info.rs deleted file mode 100644 index 5108f5db..00000000 --- a/casper_types/src/deploy_info.rs +++ /dev/null @@ -1,172 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes}, - DeployHash, TransferAddr, URef, U512, -}; - -/// Information relating to the given Deploy. -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct DeployInfo { - /// The relevant Deploy. - pub deploy_hash: DeployHash, - /// Transfers performed by the Deploy. - pub transfers: Vec, - /// Account identifier of the creator of the Deploy. - pub from: AccountHash, - /// Source purse used for payment of the Deploy. - pub source: URef, - /// Gas cost of executing the Deploy. - pub gas: U512, -} - -impl DeployInfo { - /// Creates a [`DeployInfo`]. 
-    pub fn new(
-        deploy_hash: DeployHash,
-        transfers: &[TransferAddr],
-        from: AccountHash,
-        source: URef,
-        gas: U512,
-    ) -> Self {
-        let transfers = transfers.to_vec();
-        DeployInfo {
-            deploy_hash,
-            transfers,
-            from,
-            source,
-            gas,
-        }
-    }
-}
-
-impl FromBytes for DeployInfo {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (deploy_hash, rem) = DeployHash::from_bytes(bytes)?;
-        let (transfers, rem) = Vec::<TransferAddr>::from_bytes(rem)?;
-        let (from, rem) = AccountHash::from_bytes(rem)?;
-        let (source, rem) = URef::from_bytes(rem)?;
-        let (gas, rem) = U512::from_bytes(rem)?;
-        Ok((
-            DeployInfo {
-                deploy_hash,
-                transfers,
-                from,
-                source,
-                gas,
-            },
-            rem,
-        ))
-    }
-}
-
-impl ToBytes for DeployInfo {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut result = bytesrepr::allocate_buffer(self)?;
-        self.deploy_hash.write_bytes(&mut result)?;
-        self.transfers.write_bytes(&mut result)?;
-        self.from.write_bytes(&mut result)?;
-        self.source.write_bytes(&mut result)?;
-        self.gas.write_bytes(&mut result)?;
-        Ok(result)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.deploy_hash.serialized_length()
-            + self.transfers.serialized_length()
-            + self.from.serialized_length()
-            + self.source.serialized_length()
-            + self.gas.serialized_length()
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {
-        self.deploy_hash.write_bytes(writer)?;
-        self.transfers.write_bytes(writer)?;
-        self.from.write_bytes(writer)?;
-        self.source.write_bytes(writer)?;
-        self.gas.write_bytes(writer)?;
-        Ok(())
-    }
-}
-
-/// Generators for a `Deploy`
-#[cfg(any(feature = "testing", feature = "gens", test))]
-pub(crate) mod gens {
-    use alloc::vec::Vec;
-
-    use proptest::{
-        array,
-        collection::{self, SizeRange},
-        prelude::{Arbitrary, Strategy},
-    };
-
-    use crate::{
-        account::AccountHash,
-        gens::{u512_arb, uref_arb},
-        DeployHash, DeployInfo, TransferAddr,
-    };
-
-    pub fn deploy_hash_arb() -> impl Strategy<Value = DeployHash> {
-        array::uniform32(<u8>::arbitrary()).prop_map(DeployHash::new)
-    }
-
-    pub fn transfer_addr_arb() -> impl Strategy<Value = TransferAddr> {
-        array::uniform32(<u8>::arbitrary()).prop_map(TransferAddr::new)
-    }
-
-    pub fn transfers_arb(size: impl Into<SizeRange>) -> impl Strategy<Value = Vec<TransferAddr>> {
-        collection::vec(transfer_addr_arb(), size)
-    }
-
-    pub fn account_hash_arb() -> impl Strategy<Value = AccountHash> {
-        array::uniform32(<u8>::arbitrary()).prop_map(AccountHash::new)
-    }
-
-    /// Creates an arbitrary `Deploy`
-    pub fn deploy_info_arb() -> impl Strategy<Value = DeployInfo> {
-        let transfers_length_range = 0..5;
-        (
-            deploy_hash_arb(),
-            transfers_arb(transfers_length_range),
-            account_hash_arb(),
-            uref_arb(),
-            u512_arb(),
-        )
-            .prop_map(|(deploy_hash, transfers, from, source, gas)| DeployInfo {
-                deploy_hash,
-                transfers,
-                from,
-                source,
-                gas,
-            })
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use proptest::prelude::*;
-
-    use crate::bytesrepr;
-
-    use super::gens;
-
-    proptest! {
-        #[test]
-        fn test_serialization_roundtrip(deploy_info in gens::deploy_info_arb()) {
-            bytesrepr::test_serialization_roundtrip(&deploy_info)
-        }
-    }
-}
diff --git a/casper_types/src/era_id.rs b/casper_types/src/era_id.rs
deleted file mode 100644
index 9fc35cc3..00000000
--- a/casper_types/src/era_id.rs
+++ /dev/null
@@ -1,241 +0,0 @@
-// TODO - remove once schemars stops causing warning.
-#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; -use core::{ - fmt::{self, Debug, Display, Formatter}, - num::ParseIntError, - ops::{Add, AddAssign, Sub}, - str::FromStr, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, -}; - -/// Era ID newtype. -#[derive( - Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "testing", derive(proptest_derive::Arbitrary))] -#[serde(deny_unknown_fields)] -pub struct EraId(u64); - -impl EraId { - /// Maximum possible value an [`EraId`] can hold. - pub const MAX: EraId = EraId(u64::max_value()); - - /// Creates new [`EraId`] instance. - pub const fn new(value: u64) -> EraId { - EraId(value) - } - - /// Returns an iterator over era IDs of `num_eras` future eras starting from current. - pub fn iter(&self, num_eras: u64) -> impl Iterator { - let current_era_id = self.0; - (current_era_id..current_era_id + num_eras).map(EraId) - } - - /// Returns an iterator over era IDs of `num_eras` future eras starting from current, plus the - /// provided one. - pub fn iter_inclusive(&self, num_eras: u64) -> impl Iterator { - let current_era_id = self.0; - (current_era_id..=current_era_id + num_eras).map(EraId) - } - - /// Returns a successor to current era. - /// - /// For `u64::MAX`, this returns `u64::MAX` again: We want to make sure this doesn't panic, and - /// that era number will never be reached in practice. - #[must_use] - pub fn successor(self) -> EraId { - EraId::from(self.0.saturating_add(1)) - } - - /// Returns the predecessor to current era, or `None` if genesis. - #[must_use] - pub fn predecessor(self) -> Option { - self.0.checked_sub(1).map(EraId) - } - - /// Returns the current era plus `x`, or `None` if that would overflow - pub fn checked_add(&self, x: u64) -> Option { - self.0.checked_add(x).map(EraId) - } - - /// Returns the current era minus `x`, or `None` if that would be less than `0`. - pub fn checked_sub(&self, x: u64) -> Option { - self.0.checked_sub(x).map(EraId) - } - - /// Returns the current era minus `x`, or `0` if that would be less than `0`. - #[must_use] - pub fn saturating_sub(&self, x: u64) -> EraId { - EraId::from(self.0.saturating_sub(x)) - } - - /// Returns the current era plus `x`, or [`EraId::MAX`] if overflow would occur. - #[must_use] - pub fn saturating_add(self, rhs: u64) -> EraId { - EraId(self.0.saturating_add(rhs)) - } - - /// Returns the current era times `x`, or [`EraId::MAX`] if overflow would occur. - #[must_use] - pub fn saturating_mul(&self, x: u64) -> EraId { - EraId::from(self.0.saturating_mul(x)) - } - - /// Returns whether this is era 0. - pub fn is_genesis(&self) -> bool { - self.0 == 0 - } - - /// Returns little endian bytes. - pub fn to_le_bytes(self) -> [u8; 8] { - self.0.to_le_bytes() - } - - /// Returns a raw value held by this [`EraId`] instance. - /// - /// You should prefer [`From`] trait implementations over this method where possible. 
-    pub fn value(self) -> u64 {
-        self.0
-    }
-}
-
-impl FromStr for EraId {
-    type Err = ParseIntError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        u64::from_str(s).map(EraId)
-    }
-}
-
-impl Add<u64> for EraId {
-    type Output = EraId;
-
-    #[allow(clippy::arithmetic_side_effects)] // The caller must make sure this doesn't overflow.
-    fn add(self, x: u64) -> EraId {
-        EraId::from(self.0 + x)
-    }
-}
-
-impl AddAssign<u64> for EraId {
-    fn add_assign(&mut self, x: u64) {
-        self.0 += x;
-    }
-}
-
-impl Sub<u64> for EraId {
-    type Output = EraId;
-
-    #[allow(clippy::arithmetic_side_effects)] // The caller must make sure this doesn't overflow.
-    fn sub(self, x: u64) -> EraId {
-        EraId::from(self.0 - x)
-    }
-}
-
-impl Display for EraId {
-    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        write!(f, "era {}", self.0)
-    }
-}
-
-impl From<EraId> for u64 {
-    fn from(era_id: EraId) -> Self {
-        era_id.value()
-    }
-}
-
-impl From<u64> for EraId {
-    fn from(era_id: u64) -> Self {
-        EraId(era_id)
-    }
-}
-
-impl ToBytes for EraId {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        self.0.to_bytes()
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.0.serialized_length()
-    }
-
-    #[inline(always)]
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {
-        self.0.write_bytes(writer)?;
-        Ok(())
-    }
-}
-
-impl FromBytes for EraId {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (id_value, remainder) = u64::from_bytes(bytes)?;
-        let era_id = EraId::from(id_value);
-        Ok((era_id, remainder))
-    }
-}
-
-impl CLTyped for EraId {
-    fn cl_type() -> CLType {
-        CLType::U64
-    }
-}
-
-impl Distribution<EraId> for Standard {
-    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> EraId {
-        EraId(rng.gen_range(0..1_000_000))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use proptest::prelude::*;
-
-    use super::*;
-    use crate::gens::era_id_arb;
-
-    #[test]
-    fn should_calculate_correct_inclusive_future_eras() {
-        let auction_delay = 3;
-
-        let current_era = EraId::from(42);
-
-        let window: Vec<EraId> = current_era.iter_inclusive(auction_delay).collect();
-        assert_eq!(window.len(), auction_delay as usize + 1);
-        assert_eq!(window.first(), Some(&current_era));
-        assert_eq!(
-            window.iter().next_back(),
-            Some(&(current_era + auction_delay))
-        );
-    }
-
-    #[test]
-    fn should_have_valid_genesis_era_id() {
-        let expected_initial_era_id = EraId::from(0);
-        assert!(expected_initial_era_id.is_genesis());
-        assert!(!expected_initial_era_id.successor().is_genesis())
-    }
-
-    proptest! {
-        #[test]
-        fn bytesrepr_roundtrip(era_id in era_id_arb()) {
-            bytesrepr::test_serialization_roundtrip(&era_id);
-        }
-    }
-}
diff --git a/casper_types/src/execution_result.rs b/casper_types/src/execution_result.rs
deleted file mode 100644
index 87788fc9..00000000
--- a/casper_types/src/execution_result.rs
+++ /dev/null
@@ -1,814 +0,0 @@
-//! This file provides types to allow conversion from an EE `ExecutionResult` into a similar type
-//! which can be serialized to a valid binary or JSON representation.
-//!
-//! It is stored as metadata related to a given deploy, and made available to clients via the
-//! JSON-RPC API.
-
-// TODO - remove once schemars stops causing warning.
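The module docs above describe why `execution_result.rs` existed: an `ExecutionResult` is stored as metadata for a deploy and handed to clients as JSON over the JSON-RPC API. A minimal sketch of producing that JSON view, assuming the pre-removal `casper-types` root exports shown in this diff plus a `serde_json` dependency (both are assumptions made for illustration, not part of this change):

    // Sketch only: relies on the removed casper-types API (ExecutionResult, ExecutionEffect,
    // TransferAddr, U512 assumed re-exported at the crate root) and on serde_json as an extra
    // dependency.
    use casper_types::{ExecutionEffect, ExecutionResult, TransferAddr, U512};

    fn main() -> Result<(), serde_json::Error> {
        // A trivial successful result: no transforms, one transfer, a fixed cost.
        let result = ExecutionResult::Success {
            effect: ExecutionEffect::new(Vec::new()),
            transfers: vec![TransferAddr::new([89; 32])],
            cost: U512::from(123_456),
        };
        // This is the shape the JSON-RPC API would return to clients for a deploy.
        println!("{}", serde_json::to_string_pretty(&result)?);
        Ok(())
    }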
-#![allow(clippy::field_reassign_with_default)] - -use core::convert::TryFrom; - -use alloc::{ - boxed::Box, - format, - string::{String, ToString}, - vec, - vec::Vec, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::{FromPrimitive, ToPrimitive}; -use num_derive::{FromPrimitive, ToPrimitive}; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -use rand::{ - distributions::{Distribution, Standard}, - seq::SliceRandom, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(feature = "json-schema")] -use crate::KEY_HASH_LENGTH; -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - system::auction::{Bid, EraInfo, UnbondingPurse, WithdrawPurse}, - CLValue, DeployInfo, NamedKey, Transfer, TransferAddr, U128, U256, U512, -}; - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum ExecutionResultTag { - Failure = 0, - Success = 1, -} - -impl TryFrom for ExecutionResultTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum OpTag { - Read = 0, - Write = 1, - Add = 2, - NoOp = 3, - Delete = 4, -} - -impl TryFrom for OpTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum TransformTag { - Identity = 0, - WriteCLValue = 1, - WriteAccount = 2, - WriteContractWasm = 3, - WriteContract = 4, - WriteContractPackage = 5, - WriteDeployInfo = 6, - WriteTransfer = 7, - WriteEraInfo = 8, - WriteBid = 9, - WriteWithdraw = 10, - AddInt32 = 11, - AddUInt64 = 12, - AddUInt128 = 13, - AddUInt256 = 14, - AddUInt512 = 15, - AddKeys = 16, - Failure = 17, - WriteUnbonding = 18, - Prune = 19, -} - -impl TryFrom for TransformTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -#[cfg(feature = "json-schema")] -static EXECUTION_RESULT: Lazy = Lazy::new(|| { - let operations = vec![ - Operation { - key: "account-hash-2c4a11c062a8a337bfc97e27fd66291caeb2c65865dcb5d3ef3759c4c97efecb" - .to_string(), - kind: OpKind::Write, - }, - Operation { - key: "deploy-af684263911154d26fa05be9963171802801a0b6aff8f199b7391eacb8edc9e1" - .to_string(), - kind: OpKind::Read, - }, - ]; - - let transforms = vec![ - TransformEntry { - key: "uref-2c4a11c062a8a337bfc97e27fd66291caeb2c65865dcb5d3ef3759c4c97efecb-007" - .to_string(), - transform: Transform::AddUInt64(8u64), - }, - TransformEntry { - key: "deploy-af684263911154d26fa05be9963171802801a0b6aff8f199b7391eacb8edc9e1" - .to_string(), - transform: Transform::Identity, - }, - ]; - - let effect = ExecutionEffect { - operations, - transforms, - }; - - let transfers = vec![ - TransferAddr::new([89; KEY_HASH_LENGTH]), - TransferAddr::new([130; KEY_HASH_LENGTH]), - ]; - - ExecutionResult::Success { - effect, - transfers, - cost: U512::from(123_456), - } -}); - -/// The result of executing a single deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum ExecutionResult { - /// The result of a failed execution. 
- Failure { - /// The effect of executing the deploy. - effect: ExecutionEffect, - /// A record of Transfers performed while executing the deploy. - transfers: Vec, - /// The cost of executing the deploy. - cost: U512, - /// The error message associated with executing the deploy. - error_message: String, - }, - /// The result of a successful execution. - Success { - /// The effect of executing the deploy. - effect: ExecutionEffect, - /// A record of Transfers performed while executing the deploy. - transfers: Vec, - /// The cost of executing the deploy. - cost: U512, - }, -} - -impl ExecutionResult { - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &EXECUTION_RESULT - } - - fn tag(&self) -> ExecutionResultTag { - match self { - ExecutionResult::Failure { - effect: _, - transfers: _, - cost: _, - error_message: _, - } => ExecutionResultTag::Failure, - ExecutionResult::Success { - effect: _, - transfers: _, - cost: _, - } => ExecutionResultTag::Success, - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ExecutionResult { - let op_count = rng.gen_range(0..6); - let mut operations = Vec::new(); - for _ in 0..op_count { - let op = [OpKind::Read, OpKind::Add, OpKind::NoOp, OpKind::Write] - .choose(rng) - .unwrap(); - operations.push(Operation { - key: rng.gen::().to_string(), - kind: *op, - }); - } - - let transform_count = rng.gen_range(0..6); - let mut transforms = Vec::new(); - for _ in 0..transform_count { - transforms.push(TransformEntry { - key: rng.gen::().to_string(), - transform: rng.gen(), - }); - } - - let execution_effect = ExecutionEffect::new(transforms); - - let transfer_count = rng.gen_range(0..6); - let mut transfers = vec![]; - for _ in 0..transfer_count { - transfers.push(TransferAddr::new(rng.gen())) - } - - if rng.gen() { - ExecutionResult::Failure { - effect: execution_effect, - transfers, - cost: rng.gen::().into(), - error_message: format!("Error message {}", rng.gen::()), - } - } else { - ExecutionResult::Success { - effect: execution_effect, - transfers, - cost: rng.gen::().into(), - } - } - } -} - -// TODO[goral09]: Add `write_bytes` impl. 
-impl ToBytes for ExecutionResult {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut buffer = bytesrepr::allocate_buffer(self)?;
-        let tag_byte = self.tag().to_u8().ok_or(bytesrepr::Error::Formatting)?;
-        buffer.push(tag_byte);
-        match self {
-            ExecutionResult::Failure {
-                effect,
-                transfers,
-                cost,
-                error_message,
-            } => {
-                buffer.extend(effect.to_bytes()?);
-                buffer.extend(transfers.to_bytes()?);
-                buffer.extend(cost.to_bytes()?);
-                buffer.extend(error_message.to_bytes()?);
-            }
-            ExecutionResult::Success {
-                effect,
-                transfers,
-                cost,
-            } => {
-                buffer.extend(effect.to_bytes()?);
-                buffer.extend(transfers.to_bytes()?);
-                buffer.extend(cost.to_bytes()?);
-            }
-        }
-        Ok(buffer)
-    }
-
-    fn serialized_length(&self) -> usize {
-        U8_SERIALIZED_LENGTH
-            + match self {
-                ExecutionResult::Failure {
-                    effect: execution_effect,
-                    transfers,
-                    cost,
-                    error_message,
-                } => {
-                    execution_effect.serialized_length()
-                        + transfers.serialized_length()
-                        + cost.serialized_length()
-                        + error_message.serialized_length()
-                }
-                ExecutionResult::Success {
-                    effect: execution_effect,
-                    transfers,
-                    cost,
-                } => {
-                    execution_effect.serialized_length()
-                        + transfers.serialized_length()
-                        + cost.serialized_length()
-                }
-            }
-    }
-}
-
-impl FromBytes for ExecutionResult {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (tag, remainder) = u8::from_bytes(bytes)?;
-        match TryFrom::try_from(tag)? {
-            ExecutionResultTag::Failure => {
-                let (effect, remainder) = ExecutionEffect::from_bytes(remainder)?;
-                let (transfers, remainder) = Vec::<TransferAddr>::from_bytes(remainder)?;
-                let (cost, remainder) = U512::from_bytes(remainder)?;
-                let (error_message, remainder) = String::from_bytes(remainder)?;
-                let execution_result = ExecutionResult::Failure {
-                    effect,
-                    transfers,
-                    cost,
-                    error_message,
-                };
-                Ok((execution_result, remainder))
-            }
-            ExecutionResultTag::Success => {
-                let (execution_effect, remainder) = ExecutionEffect::from_bytes(remainder)?;
-                let (transfers, remainder) = Vec::<TransferAddr>::from_bytes(remainder)?;
-                let (cost, remainder) = U512::from_bytes(remainder)?;
-                let execution_result = ExecutionResult::Success {
-                    effect: execution_effect,
-                    transfers,
-                    cost,
-                };
-                Ok((execution_result, remainder))
-            }
-        }
-    }
-}
-
-/// The journal of execution transforms from a single deploy.
-#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Default, Debug)]
-#[cfg_attr(feature = "datasize", derive(DataSize))]
-#[cfg_attr(feature = "json-schema", derive(JsonSchema))]
-#[serde(deny_unknown_fields)]
-pub struct ExecutionEffect {
-    /// The resulting operations.
-    pub operations: Vec<Operation>,
-    /// The journal of execution transforms.
-    pub transforms: Vec<TransformEntry>,
-}
-
-impl ExecutionEffect {
-    /// Constructor for [`ExecutionEffect`].
-    pub fn new(transforms: Vec<TransformEntry>) -> Self {
-        Self {
-            transforms,
-            operations: Default::default(),
-        }
-    }
-}
-
-// TODO[goral09]: Add `write_bytes` impl.
-impl ToBytes for ExecutionEffect { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.operations.to_bytes()?); - buffer.extend(self.transforms.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.operations.serialized_length() + self.transforms.serialized_length() - } -} - -impl FromBytes for ExecutionEffect { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (operations, remainder) = Vec::::from_bytes(bytes)?; - let (transforms, remainder) = Vec::::from_bytes(remainder)?; - let json_execution_journal = ExecutionEffect { - operations, - transforms, - }; - Ok((json_execution_journal, remainder)) - } -} - -/// An operation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Operation { - /// The formatted string of the `Key`. - pub key: String, - /// The type of operation. - pub kind: OpKind, -} - -// TODO[goral09]: Add `write_bytes` impl. -impl ToBytes for Operation { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.key.to_bytes()?); - buffer.extend(self.kind.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.key.serialized_length() + self.kind.serialized_length() - } -} - -impl FromBytes for Operation { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (key, remainder) = String::from_bytes(bytes)?; - let (kind, remainder) = OpKind::from_bytes(remainder)?; - let operation = Operation { key, kind }; - Ok((operation, remainder)) - } -} - -/// The type of operation performed while executing a deploy. -#[derive(Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum OpKind { - /// A read operation. - Read, - /// A write operation. - Write, - /// An addition. - Add, - /// An operation which has no effect. - NoOp, - /// A delete operation. - Delete, -} - -impl OpKind { - fn tag(&self) -> OpTag { - match self { - OpKind::Read => OpTag::Read, - OpKind::Write => OpTag::Write, - OpKind::Add => OpTag::Add, - OpKind::NoOp => OpTag::NoOp, - OpKind::Delete => OpTag::Delete, - } - } -} - -// TODO[goral09]: Add `write_bytes` impl. -impl ToBytes for OpKind { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let tag_bytes = self.tag().to_u8().ok_or(bytesrepr::Error::Formatting)?; - tag_bytes.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for OpKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match TryFrom::try_from(tag)? { - OpTag::Read => Ok((OpKind::Read, remainder)), - OpTag::Write => Ok((OpKind::Write, remainder)), - OpTag::Add => Ok((OpKind::Add, remainder)), - OpTag::NoOp => Ok((OpKind::NoOp, remainder)), - OpTag::Delete => Ok((OpKind::Delete, remainder)), - } - } -} - -/// A transformation performed while executing a deploy. 
-#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct TransformEntry { - /// The formatted string of the `Key`. - pub key: String, - /// The transformation. - pub transform: Transform, -} - -// TODO[goral09]: Add `write_bytes`. -impl ToBytes for TransformEntry { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.key.to_bytes()?); - buffer.extend(self.transform.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.key.serialized_length() + self.transform.serialized_length() - } -} - -impl FromBytes for TransformEntry { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (key, remainder) = String::from_bytes(bytes)?; - let (transform, remainder) = Transform::from_bytes(remainder)?; - let transform_entry = TransformEntry { key, transform }; - Ok((transform_entry, remainder)) - } -} - -/// The actual transformation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum Transform { - /// A transform having no effect. - Identity, - /// Writes the given CLValue to global state. - WriteCLValue(CLValue), - /// Writes the given Account to global state. - WriteAccount(AccountHash), - /// Writes a smart contract as Wasm to global state. - WriteContractWasm, - /// Writes a smart contract to global state. - WriteContract, - /// Writes a smart contract package to global state. - WriteContractPackage, - /// Writes the given DeployInfo to global state. - WriteDeployInfo(DeployInfo), - /// Writes the given EraInfo to global state. - WriteEraInfo(EraInfo), - /// Writes the given Transfer to global state. - WriteTransfer(Transfer), - /// Writes the given Bid to global state. - WriteBid(Box), - /// Writes the given Withdraw to global state. - WriteWithdraw(Vec), - /// Adds the given `i32`. - AddInt32(i32), - /// Adds the given `u64`. - AddUInt64(u64), - /// Adds the given `U128`. - AddUInt128(U128), - /// Adds the given `U256`. - AddUInt256(U256), - /// Adds the given `U512`. - AddUInt512(U512), - /// Adds the given collection of named keys. - AddKeys(Vec), - /// A failed transformation, containing an error message. - Failure(String), - /// Writes the given Unbonding to global state. - WriteUnbonding(Vec), - /// Prunes a key. 
- Prune, -} - -impl Transform { - fn tag(&self) -> TransformTag { - match self { - Transform::Identity => TransformTag::Identity, - Transform::WriteCLValue(_) => TransformTag::WriteCLValue, - Transform::WriteAccount(_) => TransformTag::WriteAccount, - Transform::WriteContractWasm => TransformTag::WriteContractWasm, - Transform::WriteContract => TransformTag::WriteContract, - Transform::WriteContractPackage => TransformTag::WriteContractPackage, - Transform::WriteDeployInfo(_) => TransformTag::WriteDeployInfo, - Transform::WriteEraInfo(_) => TransformTag::WriteEraInfo, - Transform::WriteTransfer(_) => TransformTag::WriteTransfer, - Transform::WriteBid(_) => TransformTag::WriteBid, - Transform::WriteWithdraw(_) => TransformTag::WriteWithdraw, - Transform::AddInt32(_) => TransformTag::AddInt32, - Transform::AddUInt64(_) => TransformTag::AddUInt64, - Transform::AddUInt128(_) => TransformTag::AddUInt128, - Transform::AddUInt256(_) => TransformTag::AddUInt256, - Transform::AddUInt512(_) => TransformTag::AddUInt512, - Transform::AddKeys(_) => TransformTag::AddKeys, - Transform::Failure(_) => TransformTag::Failure, - Transform::WriteUnbonding(_) => TransformTag::WriteUnbonding, - Transform::Prune => TransformTag::Prune, - } - } -} - -// TODO[goral09]: Add `write_bytes` impl. -impl ToBytes for Transform { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - let tag_bytes = self.tag().to_u8().ok_or(bytesrepr::Error::Formatting)?; - buffer.insert(0, tag_bytes); - match self { - Transform::Identity => {} - Transform::WriteCLValue(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::WriteAccount(account_hash) => { - buffer.extend(account_hash.to_bytes()?); - } - Transform::WriteContractWasm => {} - Transform::WriteContract => {} - Transform::WriteContractPackage => {} - Transform::WriteDeployInfo(deploy_info) => { - buffer.extend(deploy_info.to_bytes()?); - } - Transform::WriteEraInfo(era_info) => { - buffer.extend(era_info.to_bytes()?); - } - Transform::WriteTransfer(transfer) => { - buffer.extend(transfer.to_bytes()?); - } - Transform::WriteBid(bid) => { - buffer.extend(bid.to_bytes()?); - } - Transform::WriteWithdraw(unbonding_purses) => { - buffer.extend(unbonding_purses.to_bytes()?); - } - Transform::AddInt32(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::AddUInt64(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::AddUInt128(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::AddUInt256(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::AddUInt512(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::AddKeys(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::Failure(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::WriteUnbonding(value) => { - buffer.extend(value.to_bytes()?); - } - Transform::Prune => {} - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - let body_len = match self { - Transform::WriteCLValue(value) => value.serialized_length(), - Transform::WriteAccount(value) => value.serialized_length(), - Transform::WriteDeployInfo(value) => value.serialized_length(), - Transform::WriteEraInfo(value) => value.serialized_length(), - Transform::WriteTransfer(value) => value.serialized_length(), - Transform::AddInt32(value) => value.serialized_length(), - Transform::AddUInt64(value) => value.serialized_length(), - Transform::AddUInt128(value) => value.serialized_length(), - Transform::AddUInt256(value) => 
value.serialized_length(), - Transform::AddUInt512(value) => value.serialized_length(), - Transform::AddKeys(value) => value.serialized_length(), - Transform::Failure(value) => value.serialized_length(), - Transform::Identity - | Transform::WriteContractWasm - | Transform::WriteContract - | Transform::WriteContractPackage => 0, - Transform::WriteBid(value) => value.serialized_length(), - Transform::WriteWithdraw(value) => value.serialized_length(), - Transform::WriteUnbonding(value) => value.serialized_length(), - Transform::Prune => 0, - }; - U8_SERIALIZED_LENGTH + body_len - } -} - -impl FromBytes for Transform { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match TryFrom::try_from(tag)? { - TransformTag::Identity => Ok((Transform::Identity, remainder)), - TransformTag::WriteCLValue => { - let (cl_value, remainder) = CLValue::from_bytes(remainder)?; - Ok((Transform::WriteCLValue(cl_value), remainder)) - } - TransformTag::WriteAccount => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((Transform::WriteAccount(account_hash), remainder)) - } - TransformTag::WriteContractWasm => Ok((Transform::WriteContractWasm, remainder)), - TransformTag::WriteContract => Ok((Transform::WriteContract, remainder)), - TransformTag::WriteContractPackage => Ok((Transform::WriteContractPackage, remainder)), - TransformTag::WriteDeployInfo => { - let (deploy_info, remainder) = DeployInfo::from_bytes(remainder)?; - Ok((Transform::WriteDeployInfo(deploy_info), remainder)) - } - TransformTag::WriteEraInfo => { - let (era_info, remainder) = EraInfo::from_bytes(remainder)?; - Ok((Transform::WriteEraInfo(era_info), remainder)) - } - TransformTag::WriteTransfer => { - let (transfer, remainder) = Transfer::from_bytes(remainder)?; - Ok((Transform::WriteTransfer(transfer), remainder)) - } - TransformTag::AddInt32 => { - let (value_i32, remainder) = i32::from_bytes(remainder)?; - Ok((Transform::AddInt32(value_i32), remainder)) - } - TransformTag::AddUInt64 => { - let (value_u64, remainder) = u64::from_bytes(remainder)?; - Ok((Transform::AddUInt64(value_u64), remainder)) - } - TransformTag::AddUInt128 => { - let (value_u128, remainder) = U128::from_bytes(remainder)?; - Ok((Transform::AddUInt128(value_u128), remainder)) - } - TransformTag::AddUInt256 => { - let (value_u256, remainder) = U256::from_bytes(remainder)?; - Ok((Transform::AddUInt256(value_u256), remainder)) - } - TransformTag::AddUInt512 => { - let (value_u512, remainder) = U512::from_bytes(remainder)?; - Ok((Transform::AddUInt512(value_u512), remainder)) - } - TransformTag::AddKeys => { - let (value, remainder) = Vec::::from_bytes(remainder)?; - Ok((Transform::AddKeys(value), remainder)) - } - TransformTag::Failure => { - let (value, remainder) = String::from_bytes(remainder)?; - Ok((Transform::Failure(value), remainder)) - } - TransformTag::WriteBid => { - let (bid, remainder) = Bid::from_bytes(remainder)?; - Ok((Transform::WriteBid(Box::new(bid)), remainder)) - } - TransformTag::WriteWithdraw => { - let (withdraw_purses, remainder) = - as FromBytes>::from_bytes(remainder)?; - Ok((Transform::WriteWithdraw(withdraw_purses), remainder)) - } - TransformTag::WriteUnbonding => { - let (unbonding_purses, remainder) = - as FromBytes>::from_bytes(remainder)?; - Ok((Transform::WriteUnbonding(unbonding_purses), remainder)) - } - TransformTag::Prune => Ok((Transform::Prune, remainder)), - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) 
-> Transform { - // TODO - include WriteDeployInfo and WriteTransfer as options - match rng.gen_range(0..14) { - 0 => Transform::Identity, - 1 => Transform::WriteCLValue(CLValue::from_t(true).unwrap()), - 2 => Transform::WriteAccount(AccountHash::new(rng.gen())), - 3 => Transform::WriteContractWasm, - 4 => Transform::WriteContract, - 5 => Transform::WriteContractPackage, - 6 => Transform::AddInt32(rng.gen()), - 7 => Transform::AddUInt64(rng.gen()), - 8 => Transform::AddUInt128(rng.gen::().into()), - 9 => Transform::AddUInt256(rng.gen::().into()), - 10 => Transform::AddUInt512(rng.gen::().into()), - 11 => { - let mut named_keys = Vec::new(); - for _ in 0..rng.gen_range(1..6) { - named_keys.push(NamedKey { - name: rng.gen::().to_string(), - key: rng.gen::().to_string(), - }); - } - Transform::AddKeys(named_keys) - } - 12 => Transform::Failure(rng.gen::().to_string()), - 13 => Transform::Prune, - _ => unreachable!(), - } - } -} - -#[cfg(test)] -mod tests { - use rand::{rngs::SmallRng, Rng, SeedableRng}; - - use super::*; - - fn get_rng() -> SmallRng { - let mut seed = [0u8; 32]; - getrandom::getrandom(seed.as_mut()).unwrap(); - SmallRng::from_seed(seed) - } - - #[test] - fn bytesrepr_test_transform() { - let mut rng = get_rng(); - let transform: Transform = rng.gen(); - bytesrepr::test_serialization_roundtrip(&transform); - } - - #[test] - fn bytesrepr_test_execution_result() { - let mut rng = get_rng(); - let execution_result: ExecutionResult = rng.gen(); - bytesrepr::test_serialization_roundtrip(&execution_result); - } -} diff --git a/casper_types/src/file_utils.rs b/casper_types/src/file_utils.rs deleted file mode 100644 index 775a7315..00000000 --- a/casper_types/src/file_utils.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Utilities for handling reading from and writing to files. - -use std::{ - fs, - io::{self, Write}, - os::unix::fs::OpenOptionsExt, - path::{Path, PathBuf}, -}; - -use thiserror::Error; - -/// Error reading a file. -#[derive(Debug, Error)] -#[error("could not read '{0}': {error}", .path.display())] -pub struct ReadFileError { - /// Path that failed to be read. - path: PathBuf, - /// The underlying OS error. - #[source] - error: io::Error, -} - -/// Error writing a file -#[derive(Debug, Error)] -#[error("could not write to '{0}': {error}", .path.display())] -pub struct WriteFileError { - /// Path that failed to be written to. - path: PathBuf, - /// The underlying OS error. - #[source] - error: io::Error, -} - -/// Read complete at `path` into memory. -/// -/// Wraps `fs::read`, but preserves the filename for better error printing. -pub fn read_file>(filename: P) -> Result, ReadFileError> { - let path = filename.as_ref(); - fs::read(path).map_err(|error| ReadFileError { - path: path.to_owned(), - error, - }) -} - -/// Write data to `path`. -/// -/// Wraps `fs::write`, but preserves the filename for better error printing. -pub(crate) fn write_file, B: AsRef<[u8]>>( - filename: P, - data: B, -) -> Result<(), WriteFileError> { - let path = filename.as_ref(); - fs::write(path, data.as_ref()).map_err(|error| WriteFileError { - path: path.to_owned(), - error, - }) -} - -/// Writes data to `path`, ensuring only the owner can read or write it. -/// -/// Otherwise functions like [`write_file`]. 
-pub(crate) fn write_private_file, B: AsRef<[u8]>>( - filename: P, - data: B, -) -> Result<(), WriteFileError> { - let path = filename.as_ref(); - fs::OpenOptions::new() - .write(true) - .create(true) - .mode(0o600) - .open(path) - .and_then(|mut file| file.write_all(data.as_ref())) - .map_err(|error| WriteFileError { - path: path.to_owned(), - error, - }) -} diff --git a/casper_types/src/gas.rs b/casper_types/src/gas.rs deleted file mode 100644 index 0d0d1a40..00000000 --- a/casper_types/src/gas.rs +++ /dev/null @@ -1,232 +0,0 @@ -//! The `gas` module is used for working with Gas including converting to and from Motes. - -use core::{ - fmt, - iter::Sum, - ops::{Add, AddAssign, Div, Mul, Sub}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; -use serde::{Deserialize, Serialize}; - -use crate::{Motes, U512}; - -/// The `Gas` struct represents a `U512` amount of gas. -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Gas(U512); - -impl Gas { - /// Constructs a new `Gas`. - pub fn new(value: U512) -> Self { - Gas(value) - } - - /// Returns the inner `U512` value. - pub fn value(&self) -> U512 { - self.0 - } - - /// Converts the given `motes` to `Gas` by dividing them by `conv_rate`. - /// - /// Returns `None` if `conv_rate == 0`. - pub fn from_motes(motes: Motes, conv_rate: u64) -> Option { - motes - .value() - .checked_div(U512::from(conv_rate)) - .map(Self::new) - } - - /// Checked integer addition. Computes `self + rhs`, returning `None` if overflow occurred. - pub fn checked_add(&self, rhs: Self) -> Option { - self.0.checked_add(rhs.value()).map(Self::new) - } - - /// Checked integer subtraction. Computes `self - rhs`, returning `None` if overflow occurred. 
- pub fn checked_sub(&self, rhs: Self) -> Option { - self.0.checked_sub(rhs.value()).map(Self::new) - } -} - -impl fmt::Display for Gas { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.0) - } -} - -impl Add for Gas { - type Output = Gas; - - fn add(self, rhs: Self) -> Self::Output { - let val = self.value() + rhs.value(); - Gas::new(val) - } -} - -impl Sub for Gas { - type Output = Gas; - - fn sub(self, rhs: Self) -> Self::Output { - let val = self.value() - rhs.value(); - Gas::new(val) - } -} - -impl Div for Gas { - type Output = Gas; - - fn div(self, rhs: Self) -> Self::Output { - let val = self.value() / rhs.value(); - Gas::new(val) - } -} - -impl Mul for Gas { - type Output = Gas; - - fn mul(self, rhs: Self) -> Self::Output { - let val = self.value() * rhs.value(); - Gas::new(val) - } -} - -impl AddAssign for Gas { - fn add_assign(&mut self, rhs: Self) { - self.0 += rhs.0 - } -} - -impl Zero for Gas { - fn zero() -> Self { - Gas::new(U512::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl Sum for Gas { - fn sum>(iter: I) -> Self { - iter.fold(Gas::zero(), Add::add) - } -} - -impl From for Gas { - fn from(gas: u32) -> Self { - let gas_u512: U512 = gas.into(); - Gas::new(gas_u512) - } -} - -impl From for Gas { - fn from(gas: u64) -> Self { - let gas_u512: U512 = gas.into(); - Gas::new(gas_u512) - } -} - -#[cfg(test)] -mod tests { - use crate::U512; - - use crate::{Gas, Motes}; - - #[test] - fn should_be_able_to_get_instance_of_gas() { - let initial_value = 1; - let gas = Gas::new(U512::from(initial_value)); - assert_eq!( - initial_value, - gas.value().as_u64(), - "should have equal value" - ) - } - - #[test] - fn should_be_able_to_compare_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - assert_eq!(left_gas, right_gas, "should be equal"); - let right_gas = Gas::new(U512::from(2)); - assert_ne!(left_gas, right_gas, "should not be equal") - } - - #[test] - fn should_be_able_to_add_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - let expected_gas = Gas::new(U512::from(2)); - assert_eq!((left_gas + right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_subtract_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - let expected_gas = Gas::new(U512::from(0)); - assert_eq!((left_gas - right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_multiply_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(100)); - let right_gas = Gas::new(U512::from(10)); - let expected_gas = Gas::new(U512::from(1000)); - assert_eq!((left_gas * right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_divide_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1000)); - let right_gas = Gas::new(U512::from(100)); - let expected_gas = Gas::new(U512::from(10)); - assert_eq!((left_gas / right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_convert_from_mote() { - let mote = Motes::new(U512::from(100)); - let gas = Gas::from_motes(mote, 10).expect("should have gas"); - let expected_gas = Gas::new(U512::from(10)); - assert_eq!(gas, expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_default() { - let gas = Gas::default(); - let expected_gas = Gas::new(U512::from(0)); - assert_eq!(gas, expected_gas, "should be equal") - } - - #[test] 
-    fn should_be_able_to_compare_relative_value() {
-        let left_gas = Gas::new(U512::from(100));
-        let right_gas = Gas::new(U512::from(10));
-        assert!(left_gas > right_gas, "should be gt");
-        let right_gas = Gas::new(U512::from(100));
-        assert!(left_gas >= right_gas, "should be gte");
-        assert!(left_gas <= right_gas, "should be lte");
-        let left_gas = Gas::new(U512::from(10));
-        assert!(left_gas < right_gas, "should be lt");
-    }
-
-    #[test]
-    fn should_default() {
-        let left_gas = Gas::new(U512::from(0));
-        let right_gas = Gas::default();
-        assert_eq!(left_gas, right_gas, "should be equal");
-        let u512 = U512::zero();
-        assert_eq!(left_gas.value(), u512, "should be equal");
-    }
-
-    #[test]
-    fn should_support_checked_div_from_motes() {
-        let motes = Motes::new(U512::zero());
-        let conv_rate = 0;
-        let maybe = Gas::from_motes(motes, conv_rate);
-        assert!(maybe.is_none(), "should be none due to divide by zero");
-    }
-}
diff --git a/casper_types/src/gens.rs b/casper_types/src/gens.rs
deleted file mode 100644
index 94b3733c..00000000
--- a/casper_types/src/gens.rs
+++ /dev/null
@@ -1,531 +0,0 @@
-//! Contains functions for generating arbitrary values for use by
-//! [`Proptest`](https://crates.io/crates/proptest).
-#![allow(missing_docs)]
-
-use alloc::{boxed::Box, string::String, vec};
-
-use proptest::{
-    array, bits, bool,
-    collection::{self, SizeRange},
-    option,
-    prelude::*,
-    result,
-};
-
-use crate::{
-    account::{gens::account_arb, AccountHash, Weight},
-    contracts::{
-        ContractPackageStatus, ContractVersions, DisabledVersions, Groups, NamedKeys, Parameters,
-    },
-    crypto::gens::public_key_arb_no_system,
-    system::auction::{
-        gens::era_info_arb, Bid, DelegationRate, Delegator, UnbondingPurse, WithdrawPurse,
-        DELEGATION_RATE_DENOMINATOR,
-    },
-    transfer::TransferAddr,
-    AccessRights, CLType, CLValue, Contract, ContractHash, ContractPackage, ContractVersionKey,
-    ContractWasm, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, EraId, Group, Key,
-    NamedArg, Parameter, Phase, ProtocolVersion, SemVer, StoredValue, URef, U128, U256, U512,
-};
-
-use crate::deploy_info::gens::{deploy_hash_arb, transfer_addr_arb};
-pub use crate::{deploy_info::gens::deploy_info_arb, transfer::gens::transfer_arb};
-
-pub fn u8_slice_32() -> impl Strategy<Value = [u8; 32]> {
-    collection::vec(any::<u8>(), 32).prop_map(|b| {
-        let mut res = [0u8; 32];
-        res.clone_from_slice(b.as_slice());
-        res
-    })
-}
-
-pub fn u2_slice_32() -> impl Strategy<Value = [u8; 32]> {
-    array::uniform32(any::<u8>()).prop_map(|mut arr| {
-        for byte in arr.iter_mut() {
-            *byte &= 0b11;
-        }
-        arr
-    })
-}
-
-pub fn named_keys_arb(depth: usize) -> impl Strategy<Value = NamedKeys> {
-    collection::btree_map("\\PC*", key_arb(), depth)
-}
-
-pub fn access_rights_arb() -> impl Strategy<Value = AccessRights> {
-    prop_oneof![
-        Just(AccessRights::NONE),
-        Just(AccessRights::READ),
-        Just(AccessRights::ADD),
-        Just(AccessRights::WRITE),
-        Just(AccessRights::READ_ADD),
-        Just(AccessRights::READ_WRITE),
-        Just(AccessRights::ADD_WRITE),
-        Just(AccessRights::READ_ADD_WRITE),
-    ]
-}
-
-pub fn phase_arb() -> impl Strategy<Value = Phase> {
-    prop_oneof![
-        Just(Phase::Payment),
-        Just(Phase::Session),
-        Just(Phase::FinalizePayment),
-    ]
-}
-
-pub fn uref_arb() -> impl Strategy<Value = URef> {
-    (array::uniform32(bits::u8::ANY), access_rights_arb())
-        .prop_map(|(id, access_rights)| URef::new(id, access_rights))
-}
-
-pub fn era_id_arb() -> impl Strategy<Value = EraId> {
-    any::<u64>().prop_map(EraId::from)
-}
-
-pub fn key_arb() -> impl Strategy<Value = Key> {
-    prop_oneof![
-        account_hash_arb().prop_map(Key::Account),
-        u8_slice_32().prop_map(Key::Hash),
-        uref_arb().prop_map(Key::URef),
- transfer_addr_arb().prop_map(Key::Transfer), - deploy_hash_arb().prop_map(Key::DeployInfo), - era_id_arb().prop_map(Key::EraInfo), - uref_arb().prop_map(|uref| Key::Balance(uref.addr())), - account_hash_arb().prop_map(Key::Bid), - account_hash_arb().prop_map(Key::Withdraw), - u8_slice_32().prop_map(Key::Dictionary), - Just(Key::EraSummary), - ] -} - -pub fn colliding_key_arb() -> impl Strategy { - prop_oneof![ - u2_slice_32().prop_map(|bytes| Key::Account(AccountHash::new(bytes))), - u2_slice_32().prop_map(Key::Hash), - u2_slice_32().prop_map(|bytes| Key::URef(URef::new(bytes, AccessRights::NONE))), - u2_slice_32().prop_map(|bytes| Key::Transfer(TransferAddr::new(bytes))), - u2_slice_32().prop_map(Key::Dictionary), - ] -} - -pub fn account_hash_arb() -> impl Strategy { - u8_slice_32().prop_map(AccountHash::new) -} - -pub fn weight_arb() -> impl Strategy { - any::().prop_map(Weight::new) -} - -pub fn sem_ver_arb() -> impl Strategy { - (any::(), any::(), any::()) - .prop_map(|(major, minor, patch)| SemVer::new(major, minor, patch)) -} - -pub fn protocol_version_arb() -> impl Strategy { - sem_ver_arb().prop_map(ProtocolVersion::new) -} - -pub fn u128_arb() -> impl Strategy { - collection::vec(any::(), 0..16).prop_map(|b| U128::from_little_endian(b.as_slice())) -} - -pub fn u256_arb() -> impl Strategy { - collection::vec(any::(), 0..32).prop_map(|b| U256::from_little_endian(b.as_slice())) -} - -pub fn u512_arb() -> impl Strategy { - prop_oneof![ - 1 => Just(U512::zero()), - 8 => collection::vec(any::(), 0..64).prop_map(|b| U512::from_little_endian(b.as_slice())), - 1 => Just(U512::MAX), - ] -} - -pub fn cl_simple_type_arb() -> impl Strategy { - prop_oneof![ - Just(CLType::Bool), - Just(CLType::I32), - Just(CLType::I64), - Just(CLType::U8), - Just(CLType::U32), - Just(CLType::U64), - Just(CLType::U128), - Just(CLType::U256), - Just(CLType::U512), - Just(CLType::Unit), - Just(CLType::String), - Just(CLType::Key), - Just(CLType::URef), - ] -} - -pub fn cl_type_arb() -> impl Strategy { - cl_simple_type_arb().prop_recursive(4, 16, 8, |element| { - prop_oneof![ - // We want to produce basic types too - element.clone(), - // For complex type - element - .clone() - .prop_map(|val| CLType::Option(Box::new(val))), - element.clone().prop_map(|val| CLType::List(Box::new(val))), - // Realistic Result type generator: ok is anything recursive, err is simple type - (element.clone(), cl_simple_type_arb()).prop_map(|(ok, err)| CLType::Result { - ok: Box::new(ok), - err: Box::new(err) - }), - // Realistic Map type generator: key is simple type, value is complex recursive type - (cl_simple_type_arb(), element.clone()).prop_map(|(key, value)| CLType::Map { - key: Box::new(key), - value: Box::new(value) - }), - // Various tuples - element - .clone() - .prop_map(|cl_type| CLType::Tuple1([Box::new(cl_type)])), - (element.clone(), element.clone()).prop_map(|(cl_type1, cl_type2)| CLType::Tuple2([ - Box::new(cl_type1), - Box::new(cl_type2) - ])), - (element.clone(), element.clone(), element).prop_map( - |(cl_type1, cl_type2, cl_type3)| CLType::Tuple3([ - Box::new(cl_type1), - Box::new(cl_type2), - Box::new(cl_type3) - ]) - ), - ] - }) -} - -pub fn cl_value_arb() -> impl Strategy { - // If compiler brings you here it most probably means you've added a variant to `CLType` enum - // but forgot to add generator for it. 
- let stub: Option = None; - if let Some(cl_type) = stub { - match cl_type { - CLType::Bool - | CLType::I32 - | CLType::I64 - | CLType::U8 - | CLType::U32 - | CLType::U64 - | CLType::U128 - | CLType::U256 - | CLType::U512 - | CLType::Unit - | CLType::String - | CLType::Key - | CLType::URef - | CLType::PublicKey - | CLType::Option(_) - | CLType::List(_) - | CLType::ByteArray(..) - | CLType::Result { .. } - | CLType::Map { .. } - | CLType::Tuple1(_) - | CLType::Tuple2(_) - | CLType::Tuple3(_) - | CLType::Any => (), - } - }; - - prop_oneof![ - Just(CLValue::from_t(()).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u128_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u256_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u512_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - key_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - uref_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - ".*".prop_map(|x: String| CLValue::from_t(x).expect("should create CLValue")), - option::of(any::()).prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - collection::vec(uref_arb(), 0..100) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - result::maybe_err(key_arb(), ".*") - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - collection::btree_map(".*", u512_arb(), 0..100) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::()).prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::(), any::()) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::(), any::(), any::()) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - // Fixed lists of any size - any::().prop_map(|len| CLValue::from_t([len; 32]).expect("should create CLValue")), - ] -} - -pub fn result_arb() -> impl Strategy> { - result::maybe_ok(any::(), any::()) -} - -pub fn named_args_arb() -> impl Strategy { - (".*", cl_value_arb()).prop_map(|(name, value)| NamedArg::new(name, value)) -} - -pub fn group_arb() -> impl Strategy { - ".*".prop_map(Group::new) -} - -pub fn entry_point_access_arb() -> impl Strategy { - prop_oneof![ - Just(EntryPointAccess::Public), - collection::vec(group_arb(), 0..32).prop_map(EntryPointAccess::Groups), - ] -} - -pub fn entry_point_type_arb() -> impl Strategy { - prop_oneof![ - Just(EntryPointType::Session), - Just(EntryPointType::Contract), - ] -} - -pub fn parameter_arb() -> impl Strategy { - (".*", cl_type_arb()).prop_map(|(name, cl_type)| Parameter::new(name, cl_type)) -} - -pub fn parameters_arb() -> impl Strategy { - collection::vec(parameter_arb(), 0..10) -} - -pub fn entry_point_arb() -> impl Strategy { - ( - ".*", - parameters_arb(), - entry_point_type_arb(), - entry_point_access_arb(), - cl_type_arb(), - ) - .prop_map( - |(name, parameters, entry_point_type, entry_point_access, ret)| { - EntryPoint::new(name, parameters, ret, entry_point_access, entry_point_type) - }, - ) -} - -pub fn entry_points_arb() -> impl Strategy { - 
collection::vec(entry_point_arb(), 1..10).prop_map(EntryPoints::from) -} - -pub fn contract_arb() -> impl Strategy { - ( - protocol_version_arb(), - entry_points_arb(), - u8_slice_32(), - u8_slice_32(), - named_keys_arb(20), - ) - .prop_map( - |( - protocol_version, - entry_points, - contract_package_hash_arb, - contract_wasm_hash, - named_keys, - )| { - Contract::new( - contract_package_hash_arb.into(), - contract_wasm_hash.into(), - named_keys, - entry_points, - protocol_version, - ) - }, - ) -} - -pub fn contract_wasm_arb() -> impl Strategy { - collection::vec(any::(), 1..1000).prop_map(ContractWasm::new) -} - -pub fn contract_version_key_arb() -> impl Strategy { - (1..32u32, 1..1000u32) - .prop_map(|(major, contract_ver)| ContractVersionKey::new(major, contract_ver)) -} - -pub fn contract_versions_arb() -> impl Strategy { - collection::btree_map( - contract_version_key_arb(), - u8_slice_32().prop_map(ContractHash::new), - 1..5, - ) -} - -pub fn disabled_versions_arb() -> impl Strategy { - collection::btree_set(contract_version_key_arb(), 0..5) -} - -pub fn groups_arb() -> impl Strategy { - collection::btree_map(group_arb(), collection::btree_set(uref_arb(), 1..10), 0..5) -} - -pub fn contract_package_arb() -> impl Strategy { - ( - uref_arb(), - contract_versions_arb(), - disabled_versions_arb(), - groups_arb(), - ) - .prop_map(|(access_key, versions, disabled_versions, groups)| { - ContractPackage::new( - access_key, - versions, - disabled_versions, - groups, - ContractPackageStatus::default(), - ) - }) -} - -fn delegator_arb() -> impl Strategy { - ( - public_key_arb_no_system(), - u512_arb(), - uref_arb(), - public_key_arb_no_system(), - ) - .prop_map( - |(delegator_pk, staked_amount, bonding_purse, validator_pk)| { - Delegator::unlocked(delegator_pk, staked_amount, bonding_purse, validator_pk) - }, - ) -} - -fn delegation_rate_arb() -> impl Strategy { - 0..=DELEGATION_RATE_DENOMINATOR // Maximum, allowed value for delegation rate. 
-} - -pub(crate) fn bid_arb(delegations_len: impl Into) -> impl Strategy { - ( - public_key_arb_no_system(), - uref_arb(), - u512_arb(), - delegation_rate_arb(), - bool::ANY, - collection::vec(delegator_arb(), delegations_len), - ) - .prop_map( - |( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - is_locked, - new_delegators, - )| { - let mut bid = if is_locked { - Bid::locked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - 1u64, - ) - } else { - Bid::unlocked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - ) - }; - let delegators = bid.delegators_mut(); - new_delegators.into_iter().for_each(|delegator| { - assert!(delegators - .insert(delegator.delegator_public_key().clone(), delegator) - .is_none()); - }); - bid - }, - ) -} - -fn withdraw_arb() -> impl Strategy { - ( - uref_arb(), - public_key_arb_no_system(), - public_key_arb_no_system(), - era_id_arb(), - u512_arb(), - ) - .prop_map(|(bonding_purse, validator_pk, unbonder_pk, era, amount)| { - WithdrawPurse::new(bonding_purse, validator_pk, unbonder_pk, era, amount) - }) -} - -fn withdraws_arb(size: impl Into) -> impl Strategy> { - collection::vec(withdraw_arb(), size) -} - -fn unbonding_arb() -> impl Strategy { - ( - uref_arb(), - public_key_arb_no_system(), - public_key_arb_no_system(), - era_id_arb(), - u512_arb(), - option::of(public_key_arb_no_system()), - ) - .prop_map( - |( - bonding_purse, - validator_public_key, - unbonder_public_key, - era, - amount, - new_validator, - )| { - UnbondingPurse::new( - bonding_purse, - validator_public_key, - unbonder_public_key, - era, - amount, - new_validator, - ) - }, - ) -} - -fn unbondings_arb(size: impl Into) -> impl Strategy> { - collection::vec(unbonding_arb(), size) -} - -pub fn stored_value_arb() -> impl Strategy { - prop_oneof![ - cl_value_arb().prop_map(StoredValue::CLValue), - account_arb().prop_map(StoredValue::Account), - contract_wasm_arb().prop_map(StoredValue::ContractWasm), - contract_arb().prop_map(StoredValue::Contract), - contract_package_arb().prop_map(StoredValue::ContractPackage), - transfer_arb().prop_map(StoredValue::Transfer), - deploy_info_arb().prop_map(StoredValue::DeployInfo), - era_info_arb(1..10).prop_map(StoredValue::EraInfo), - bid_arb(0..100).prop_map(|bid| StoredValue::Bid(Box::new(bid))), - withdraws_arb(1..50).prop_map(StoredValue::Withdraw), - unbondings_arb(1..50).prop_map(StoredValue::Unbonding) - ] - .prop_map(|stored_value| - // The following match statement is here only to make sure - // we don't forget to update the generator when a new variant is added. 
- match stored_value { - StoredValue::CLValue(_) => stored_value, - StoredValue::Account(_) => stored_value, - StoredValue::ContractWasm(_) => stored_value, - StoredValue::Contract(_) => stored_value, - StoredValue::ContractPackage(_) => stored_value, - StoredValue::Transfer(_) => stored_value, - StoredValue::DeployInfo(_) => stored_value, - StoredValue::EraInfo(_) => stored_value, - StoredValue::Bid(_) => stored_value, - StoredValue::Withdraw(_) => stored_value, - StoredValue::Unbonding(_) => stored_value, - }) -} diff --git a/casper_types/src/json_pretty_printer.rs b/casper_types/src/json_pretty_printer.rs deleted file mode 100644 index 3648d38c..00000000 --- a/casper_types/src/json_pretty_printer.rs +++ /dev/null @@ -1,291 +0,0 @@ -extern crate alloc; - -use alloc::{format, string::String, vec::Vec}; - -use serde::Serialize; -use serde_json::{json, Value}; - -const MAX_STRING_LEN: usize = 150; - -/// Represents the information about a substring found in a string. -#[derive(Debug)] -struct SubstringSpec { - /// Index of the first character. - start_index: usize, - /// Length of the substring. - length: usize, -} - -impl SubstringSpec { - /// Constructs a new StringSpec with the given start index and length. - fn new(start_index: usize, length: usize) -> Self { - Self { - start_index, - length, - } - } -} - -/// Serializes the given data structure as a pretty-printed `String` of JSON using -/// `serde_json::to_string_pretty()`, but after first reducing any large hex-string values. -/// -/// A large hex-string is one containing only hex characters and which is over `MAX_STRING_LEN`. -/// Such hex-strings will be replaced by an indication of the number of chars redacted, for example -/// `[130 hex chars]`. -pub fn json_pretty_print(value: &T) -> serde_json::Result -where - T: ?Sized + Serialize, -{ - let mut json_value = json!(value); - shorten_string_field(&mut json_value); - - serde_json::to_string_pretty(&json_value) -} - -/// Searches the given string for all occurrences of hex substrings -/// that are longer than the specified `max_len`. -fn find_hex_strings_longer_than(string: &str, max_len: usize) -> Vec { - let mut ranges_to_remove = Vec::new(); - let mut start_index = 0; - let mut contiguous_hex_count = 0; - - // Record all large hex-strings' start positions and lengths. - for (index, char) in string.char_indices() { - if char.is_ascii_hexdigit() { - if contiguous_hex_count == 0 { - // This is the start of a new hex-string. - start_index = index; - } - contiguous_hex_count += 1; - } else if contiguous_hex_count != 0 { - // This is the end of a hex-string: if it's too long, record it. - if contiguous_hex_count > max_len { - ranges_to_remove.push(SubstringSpec::new(start_index, contiguous_hex_count)); - } - contiguous_hex_count = 0; - } - } - // If the string contains a large hex-string at the end, record it now. - if contiguous_hex_count > max_len { - ranges_to_remove.push(SubstringSpec::new(start_index, contiguous_hex_count)); - } - ranges_to_remove -} - -fn shorten_string_field(value: &mut Value) { - match value { - Value::String(string) => { - // Iterate over the ranges to remove from last to first so each - // replacement start index remains valid. 
- find_hex_strings_longer_than(string, MAX_STRING_LEN) - .into_iter() - .rev() - .for_each( - |SubstringSpec { - start_index, - length, - }| { - let range = start_index..(start_index + length); - string.replace_range(range, &format!("[{} hex chars]", length)); - }, - ) - } - Value::Array(values) => { - for value in values { - shorten_string_field(value); - } - } - Value::Object(map) => { - for map_value in map.values_mut() { - shorten_string_field(map_value); - } - } - Value::Null | Value::Bool(_) | Value::Number(_) => {} - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn hex_string(length: usize) -> String { - "0123456789abcdef".chars().cycle().take(length).collect() - } - - impl PartialEq<(usize, usize)> for SubstringSpec { - fn eq(&self, other: &(usize, usize)) -> bool { - self.start_index == other.0 && self.length == other.1 - } - } - - #[test] - fn finds_hex_strings_longer_than() { - const TESTING_LEN: usize = 3; - - let input = "01234"; - let expected = vec![(0, 5)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "01234-0123"; - let expected = vec![(0, 5), (6, 4)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "012-34-0123"; - let expected = vec![(7, 4)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "012-34-01-23"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "0"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = ""; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - } - - #[test] - fn respects_length() { - let input = "I like beef"; - let expected = vec![(7, 4)]; - let actual = find_hex_strings_longer_than(input, 3); - assert_eq!(actual, expected); - - let input = "I like beef"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, 1000); - assert_eq!(actual, expected); - } - - #[test] - fn should_shorten_long_strings() { - let max_unshortened_hex_string = hex_string(MAX_STRING_LEN); - let long_hex_string = hex_string(MAX_STRING_LEN + 1); - let long_non_hex_string: String = "g".repeat(MAX_STRING_LEN + 1); - let long_hex_substring = format!("a-{}-b", hex_string(MAX_STRING_LEN + 1)); - let multiple_long_hex_substrings = - format!("a: {0}, b: {0}, c: {0}", hex_string(MAX_STRING_LEN + 1)); - - let mut long_strings: Vec = vec![]; - for i in 1..=5 { - long_strings.push("a".repeat(MAX_STRING_LEN + i)); - } - let value = json!({ - "field_1": Option::::None, - "field_2": true, - "field_3": 123, - "field_4": max_unshortened_hex_string, - "field_5": ["short string value", long_hex_string], - "field_6": { - "f1": Option::::None, - "f2": false, - "f3": -123, - "f4": long_non_hex_string, - "f5": ["short string value", long_hex_substring], - "f6": { - "final long string": multiple_long_hex_substrings - } - } - }); - - let expected = r#"{ - "field_1": null, - "field_2": true, - "field_3": 123, - "field_4": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef012345", - "field_5": [ - "short string value", - "[151 hex chars]" - ], - "field_6": { - 
"f1": null, - "f2": false, - "f3": -123, - "f4": "ggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg", - "f5": [ - "short string value", - "a-[151 hex chars]-b" - ], - "f6": { - "final long string": "a: [151 hex chars], b: [151 hex chars], c: [151 hex chars]" - } - } -}"#; - - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } - - #[test] - fn should_not_modify_short_strings() { - let max_string: String = "a".repeat(MAX_STRING_LEN); - let value = json!({ - "field_1": Option::::None, - "field_2": true, - "field_3": 123, - "field_4": max_string, - "field_5": [ - "short string value", - "another short string" - ], - "field_6": { - "f1": Option::::None, - "f2": false, - "f3": -123, - "f4": "short", - "f5": [ - "short string value", - "another short string" - ], - "f6": { - "final string": "the last short string" - } - } - }); - - let expected = serde_json::to_string_pretty(&value).unwrap(); - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } - - #[test] - /// Ref: https://github.com/casper-network/casper-node/issues/1456 - fn regression_1456() { - let long_string = r#"state query failed: ValueNotFound("Failed to find base key at path: Key::Account(72698d4dc715a28347b15920b09b4f0f1d633be5a33f4686d06992415b0825e2)")"#; - assert_eq!(long_string.len(), 148); - - let value = json!({ - "code": -32003, - "message": long_string, - }); - - let expected = r#"{ - "code": -32003, - "message": "state query failed: ValueNotFound(\"Failed to find base key at path: Key::Account(72698d4dc715a28347b15920b09b4f0f1d633be5a33f4686d06992415b0825e2)\")" -}"#; - - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } -} diff --git a/casper_types/src/key.rs b/casper_types/src/key.rs deleted file mode 100644 index addede02..00000000 --- a/casper_types/src/key.rs +++ /dev/null @@ -1,1458 +0,0 @@ -//! Key types. 
- -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; - -use core::{ - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - str::FromStr, -}; - -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account::{self, AccountHash, ACCOUNT_HASH_LENGTH}, - bytesrepr::{self, Error, FromBytes, ToBytes, U64_SERIALIZED_LENGTH}, - checksummed_hex, - contract_wasm::ContractWasmHash, - contracts::{ContractHash, ContractPackageHash}, - uref::{self, URef, URefAddr, UREF_SERIALIZED_LENGTH}, - DeployHash, EraId, Tagged, TransferAddr, TransferFromStrError, DEPLOY_HASH_LENGTH, - TRANSFER_ADDR_LENGTH, UREF_ADDR_LENGTH, -}; - -const HASH_PREFIX: &str = "hash-"; -const DEPLOY_INFO_PREFIX: &str = "deploy-"; -const ERA_INFO_PREFIX: &str = "era-"; -const BALANCE_PREFIX: &str = "balance-"; -const BID_PREFIX: &str = "bid-"; -const WITHDRAW_PREFIX: &str = "withdraw-"; -const DICTIONARY_PREFIX: &str = "dictionary-"; -const UNBOND_PREFIX: &str = "unbond-"; -const SYSTEM_CONTRACT_REGISTRY_PREFIX: &str = "system-contract-registry-"; -const ERA_SUMMARY_PREFIX: &str = "era-summary-"; -const CHAINSPEC_REGISTRY_PREFIX: &str = "chainspec-registry-"; -const CHECKSUM_REGISTRY_PREFIX: &str = "checksum-registry-"; - -/// The number of bytes in a Blake2b hash -pub const BLAKE2B_DIGEST_LENGTH: usize = 32; -/// The number of bytes in a [`Key::Hash`]. -pub const KEY_HASH_LENGTH: usize = 32; -/// The number of bytes in a [`Key::Transfer`]. -pub const KEY_TRANSFER_LENGTH: usize = TRANSFER_ADDR_LENGTH; -/// The number of bytes in a [`Key::DeployInfo`]. -pub const KEY_DEPLOY_INFO_LENGTH: usize = DEPLOY_HASH_LENGTH; -/// The number of bytes in a [`Key::Dictionary`]. -pub const KEY_DICTIONARY_LENGTH: usize = 32; -/// The maximum length for a `dictionary_item_key`. 
-pub const DICTIONARY_ITEM_KEY_MAX_LENGTH: usize = 128; -const PADDING_BYTES: [u8; 32] = [0u8; 32]; -const KEY_ID_SERIALIZED_LENGTH: usize = 1; -// u8 used to determine the ID -const KEY_HASH_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_UREF_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + UREF_SERIALIZED_LENGTH; -const KEY_TRANSFER_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_TRANSFER_LENGTH; -const KEY_DEPLOY_INFO_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_DEPLOY_INFO_LENGTH; -const KEY_ERA_INFO_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + U64_SERIALIZED_LENGTH; -const KEY_BALANCE_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + UREF_ADDR_LENGTH; -const KEY_BID_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_WITHDRAW_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_UNBOND_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_DICTIONARY_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_DICTIONARY_LENGTH; -const KEY_SYSTEM_CONTRACT_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_ERA_SUMMARY_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_CHAINSPEC_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_CHECKSUM_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); - -/// An alias for [`Key`]s hash variant. -pub type HashAddr = [u8; KEY_HASH_LENGTH]; - -/// An alias for [`Key`]s dictionary variant. -pub type DictionaryAddr = [u8; KEY_DICTIONARY_LENGTH]; - -#[allow(missing_docs)] -#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] -#[repr(u8)] -pub enum KeyTag { - Account = 0, - Hash = 1, - URef = 2, - Transfer = 3, - DeployInfo = 4, - EraInfo = 5, - Balance = 6, - Bid = 7, - Withdraw = 8, - Dictionary = 9, - SystemContractRegistry = 10, - EraSummary = 11, - Unbond = 12, - ChainspecRegistry = 13, - ChecksumRegistry = 14, -} - -/// The type under which data (e.g. [`CLValue`](crate::CLValue)s, smart contracts, user accounts) -/// are indexed on the network. -#[repr(C)] -#[derive(PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum Key { - /// A `Key` under which a user account is stored. - Account(AccountHash), - /// A `Key` under which a smart contract is stored and which is the pseudo-hash of the - /// contract. - Hash(HashAddr), - /// A `Key` which is a [`URef`], under which most types of data can be stored. - URef(URef), - /// A `Key` under which we store a transfer. - Transfer(TransferAddr), - /// A `Key` under which we store a deploy info. - DeployInfo(DeployHash), - /// A `Key` under which we store an era info. - EraInfo(EraId), - /// A `Key` under which we store a purse balance. - Balance(URefAddr), - /// A `Key` under which we store bid information - Bid(AccountHash), - /// A `Key` under which we store withdraw information. - Withdraw(AccountHash), - /// A `Key` variant whose value is derived by hashing [`URef`]s address and arbitrary data. - Dictionary(DictionaryAddr), - /// A `Key` variant under which system contract hashes are stored. - SystemContractRegistry, - /// A `Key` under which we store current era info. - EraSummary, - /// A `Key` under which we store unbond information. 
- Unbond(AccountHash), - /// A `Key` variant under which chainspec and other hashes are stored. - ChainspecRegistry, - /// A `Key` variant under which we store a registry of checksums. - ChecksumRegistry, -} - -/// Errors produced when converting a `String` into a `Key`. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Account parse error. - Account(account::FromStrError), - /// Hash parse error. - Hash(String), - /// URef parse error. - URef(uref::FromStrError), - /// Transfer parse error. - Transfer(TransferFromStrError), - /// DeployInfo parse error. - DeployInfo(String), - /// EraInfo parse error. - EraInfo(String), - /// Balance parse error. - Balance(String), - /// Bid parse error. - Bid(String), - /// Withdraw parse error. - Withdraw(String), - /// Dictionary parse error. - Dictionary(String), - /// System contract registry parse error. - SystemContractRegistry(String), - /// Era summary parse error. - EraSummary(String), - /// Unbond parse error. - Unbond(String), - /// Chainspec registry error. - ChainspecRegistry(String), - /// Checksum registry error. - ChecksumRegistry(String), - /// Unknown prefix. - UnknownPrefix, -} - -impl From for FromStrError { - fn from(error: account::FromStrError) -> Self { - FromStrError::Account(error) - } -} - -impl From for FromStrError { - fn from(error: TransferFromStrError) -> Self { - FromStrError::Transfer(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::Account(error) => write!(f, "account-key from string error: {}", error), - FromStrError::Hash(error) => write!(f, "hash-key from string error: {}", error), - FromStrError::URef(error) => write!(f, "uref-key from string error: {}", error), - FromStrError::Transfer(error) => write!(f, "transfer-key from string error: {}", error), - FromStrError::DeployInfo(error) => { - write!(f, "deploy-info-key from string error: {}", error) - } - FromStrError::EraInfo(error) => write!(f, "era-info-key from string error: {}", error), - FromStrError::Balance(error) => write!(f, "balance-key from string error: {}", error), - FromStrError::Bid(error) => write!(f, "bid-key from string error: {}", error), - FromStrError::Withdraw(error) => write!(f, "withdraw-key from string error: {}", error), - FromStrError::Dictionary(error) => { - write!(f, "dictionary-key from string error: {}", error) - } - FromStrError::SystemContractRegistry(error) => { - write!( - f, - "system-contract-registry-key from string error: {}", - error - ) - } - FromStrError::EraSummary(error) => { - write!(f, "era-summary-key from string error: {}", error) - } - FromStrError::Unbond(error) => { - write!(f, "unbond-key from string error: {}", error) - } - FromStrError::ChainspecRegistry(error) => { - write!(f, "chainspec-registry-key from string error: {}", error) - } - FromStrError::ChecksumRegistry(error) => { - write!(f, "checksum-registry-key from string error: {}", error) - } - FromStrError::UnknownPrefix => write!(f, "unknown prefix for key"), - } - } -} - -impl Key { - // This method is not intended to be used by third party crates. 
- #[doc(hidden)] - pub fn type_string(&self) -> String { - match self { - Key::Account(_) => String::from("Key::Account"), - Key::Hash(_) => String::from("Key::Hash"), - Key::URef(_) => String::from("Key::URef"), - Key::Transfer(_) => String::from("Key::Transfer"), - Key::DeployInfo(_) => String::from("Key::DeployInfo"), - Key::EraInfo(_) => String::from("Key::EraInfo"), - Key::Balance(_) => String::from("Key::Balance"), - Key::Bid(_) => String::from("Key::Bid"), - Key::Withdraw(_) => String::from("Key::Unbond"), - Key::Dictionary(_) => String::from("Key::Dictionary"), - Key::SystemContractRegistry => String::from("Key::SystemContractRegistry"), - Key::EraSummary => String::from("Key::EraSummary"), - Key::Unbond(_) => String::from("Key::Unbond"), - Key::ChainspecRegistry => String::from("Key::ChainspecRegistry"), - Key::ChecksumRegistry => String::from("Key::ChecksumRegistry"), - } - } - - /// Returns the maximum size a [`Key`] can be serialized into. - pub const fn max_serialized_length() -> usize { - KEY_UREF_SERIALIZED_LENGTH - } - - /// If `self` is of type [`Key::URef`], returns `self` with the - /// [`AccessRights`](crate::AccessRights) stripped from the wrapped [`URef`], otherwise - /// returns `self` unmodified. - #[must_use] - pub fn normalize(self) -> Key { - match self { - Key::URef(uref) => Key::URef(uref.remove_access_rights()), - other => other, - } - } - - /// Returns a human-readable version of `self`, with the inner bytes encoded to Base16. - pub fn to_formatted_string(self) -> String { - match self { - Key::Account(account_hash) => account_hash.to_formatted_string(), - Key::Hash(addr) => format!("{}{}", HASH_PREFIX, base16::encode_lower(&addr)), - Key::URef(uref) => uref.to_formatted_string(), - Key::Transfer(transfer_addr) => transfer_addr.to_formatted_string(), - Key::DeployInfo(addr) => { - format!( - "{}{}", - DEPLOY_INFO_PREFIX, - base16::encode_lower(addr.as_bytes()) - ) - } - Key::EraInfo(era_id) => { - format!("{}{}", ERA_INFO_PREFIX, era_id.value()) - } - Key::Balance(uref_addr) => { - format!("{}{}", BALANCE_PREFIX, base16::encode_lower(&uref_addr)) - } - Key::Bid(account_hash) => { - format!("{}{}", BID_PREFIX, base16::encode_lower(&account_hash)) - } - Key::Withdraw(account_hash) => { - format!("{}{}", WITHDRAW_PREFIX, base16::encode_lower(&account_hash)) - } - Key::Dictionary(dictionary_addr) => { - format!( - "{}{}", - DICTIONARY_PREFIX, - base16::encode_lower(&dictionary_addr) - ) - } - Key::SystemContractRegistry => { - format!( - "{}{}", - SYSTEM_CONTRACT_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::EraSummary => { - format!( - "{}{}", - ERA_SUMMARY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::Unbond(account_hash) => { - format!("{}{}", UNBOND_PREFIX, base16::encode_lower(&account_hash)) - } - Key::ChainspecRegistry => { - format!( - "{}{}", - CHAINSPEC_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::ChecksumRegistry => { - format!( - "{}{}", - CHECKSUM_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - } - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `Key`. 
- pub fn from_formatted_str(input: &str) -> Result { - match AccountHash::from_formatted_str(input) { - Ok(account_hash) => return Ok(Key::Account(account_hash)), - Err(account::FromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - if let Some(hex) = input.strip_prefix(HASH_PREFIX) { - let addr = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Hash(error.to_string()))?; - let hash_addr = HashAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::Hash(error.to_string()))?; - return Ok(Key::Hash(hash_addr)); - } - - if let Some(hex) = input.strip_prefix(DEPLOY_INFO_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::DeployInfo(error.to_string()))?; - let hash_array = <[u8; DEPLOY_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::DeployInfo(error.to_string()))?; - return Ok(Key::DeployInfo(DeployHash::new(hash_array))); - } - - match TransferAddr::from_formatted_str(input) { - Ok(transfer_addr) => return Ok(Key::Transfer(transfer_addr)), - Err(TransferFromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - match URef::from_formatted_str(input) { - Ok(uref) => return Ok(Key::URef(uref)), - Err(uref::FromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - if let Some(era_summary_padding) = input.strip_prefix(ERA_SUMMARY_PREFIX) { - let padded_bytes = checksummed_hex::decode(era_summary_padding) - .map_err(|error| FromStrError::EraSummary(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::EraSummary("Failed to deserialize era summary key".to_string()) - })?; - return Ok(Key::EraSummary); - } - - if let Some(era_id_str) = input.strip_prefix(ERA_INFO_PREFIX) { - let era_id = EraId::from_str(era_id_str) - .map_err(|error| FromStrError::EraInfo(error.to_string()))?; - return Ok(Key::EraInfo(era_id)); - } - - if let Some(hex) = input.strip_prefix(BALANCE_PREFIX) { - let addr = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Balance(error.to_string()))?; - let uref_addr = URefAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::Balance(error.to_string()))?; - return Ok(Key::Balance(uref_addr)); - } - - if let Some(hex) = input.strip_prefix(BID_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Bid(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Bid(error.to_string()))?; - return Ok(Key::Bid(AccountHash::new(account_hash))); - } - - if let Some(hex) = input.strip_prefix(WITHDRAW_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Withdraw(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Withdraw(error.to_string()))?; - return Ok(Key::Withdraw(AccountHash::new(account_hash))); - } - - if let Some(hex) = input.strip_prefix(UNBOND_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Unbond(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Unbond(error.to_string()))?; - return Ok(Key::Unbond(AccountHash::new(account_hash))); - } - - if let Some(dictionary_addr) = input.strip_prefix(DICTIONARY_PREFIX) { - let dictionary_addr_bytes = checksummed_hex::decode(dictionary_addr) - .map_err(|error| 
FromStrError::Dictionary(error.to_string()))?; - let addr = DictionaryAddr::try_from(dictionary_addr_bytes.as_ref()) - .map_err(|error| FromStrError::Dictionary(error.to_string()))?; - return Ok(Key::Dictionary(addr)); - } - - if let Some(registry_address) = input.strip_prefix(SYSTEM_CONTRACT_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::SystemContractRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::SystemContractRegistry( - "Failed to deserialize system registry key".to_string(), - ) - })?; - return Ok(Key::SystemContractRegistry); - } - - if let Some(registry_address) = input.strip_prefix(CHAINSPEC_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::ChainspecRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::ChainspecRegistry( - "Failed to deserialize chainspec registry key".to_string(), - ) - })?; - return Ok(Key::ChainspecRegistry); - } - - if let Some(registry_address) = input.strip_prefix(CHECKSUM_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::ChecksumRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::ChecksumRegistry( - "Failed to deserialize checksum registry key".to_string(), - ) - })?; - return Ok(Key::ChecksumRegistry); - } - - Err(FromStrError::UnknownPrefix) - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::Account`], otherwise returns - /// `None`. - pub fn into_account(self) -> Option { - match self { - Key::Account(bytes) => Some(bytes), - _ => None, - } - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::Hash`], otherwise returns - /// `None`. - pub fn into_hash(self) -> Option { - match self { - Key::Hash(hash) => Some(hash), - _ => None, - } - } - - /// Returns a reference to the inner [`URef`] if `self` is of type [`Key::URef`], otherwise - /// returns `None`. - pub fn as_uref(&self) -> Option<&URef> { - match self { - Key::URef(uref) => Some(uref), - _ => None, - } - } - - /// Returns a reference to the inner [`URef`] if `self` is of type [`Key::URef`], otherwise - /// returns `None`. - pub fn as_uref_mut(&mut self) -> Option<&mut URef> { - match self { - Key::URef(uref) => Some(uref), - _ => None, - } - } - - /// Returns a reference to the inner `URefAddr` if `self` is of type [`Key::Balance`], - /// otherwise returns `None`. - pub fn as_balance(&self) -> Option<&URefAddr> { - if let Self::Balance(v) = self { - Some(v) - } else { - None - } - } - - /// Returns the inner [`URef`] if `self` is of type [`Key::URef`], otherwise returns `None`. - pub fn into_uref(self) -> Option { - match self { - Key::URef(uref) => Some(uref), - _ => None, - } - } - - /// Returns a reference to the inner [`DictionaryAddr`] if `self` is of type - /// [`Key::Dictionary`], otherwise returns `None`. 
- pub fn as_dictionary(&self) -> Option<&DictionaryAddr> { - match self { - Key::Dictionary(v) => Some(v), - _ => None, - } - } - - /// Casts a [`Key::URef`] to a [`Key::Hash`] - pub fn uref_to_hash(&self) -> Option { - let uref = self.as_uref()?; - let addr = uref.addr(); - Some(Key::Hash(addr)) - } - - /// Casts a [`Key::Withdraw`] to a [`Key::Unbond`] - pub fn withdraw_to_unbond(&self) -> Option { - if let Key::Withdraw(account_hash) = self { - return Some(Key::Unbond(*account_hash)); - } - None - } - - /// Creates a new [`Key::Dictionary`] variant based on a `seed_uref` and a `dictionary_item_key` - /// bytes. - pub fn dictionary(seed_uref: URef, dictionary_item_key: &[u8]) -> Key { - // NOTE: Expect below is safe because the length passed is supported. - let mut hasher = VarBlake2b::new(BLAKE2B_DIGEST_LENGTH).expect("should create hasher"); - hasher.update(seed_uref.addr().as_ref()); - hasher.update(dictionary_item_key); - // NOTE: Assumed safe as size of `HashAddr` equals to the output provided by hasher. - let mut addr = HashAddr::default(); - hasher.finalize_variable(|hash| addr.clone_from_slice(hash)); - Key::Dictionary(addr) - } - - /// Returns true if the key is of type [`Key::Dictionary`]. - pub fn is_dictionary_key(&self) -> bool { - if let Key::Dictionary(_) = self { - return true; - } - false - } - - /// Returns a reference to the inner [`AccountHash`] if `self` is of type - /// [`Key::Withdraw`], otherwise returns `None`. - pub fn as_withdraw(&self) -> Option<&AccountHash> { - if let Self::Withdraw(v) = self { - Some(v) - } else { - None - } - } -} - -impl Display for Key { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - Key::Account(account_hash) => write!(f, "Key::Account({})", account_hash), - Key::Hash(addr) => write!(f, "Key::Hash({})", base16::encode_lower(&addr)), - Key::URef(uref) => write!(f, "Key::{}", uref), /* Display impl for URef will append */ - Key::Transfer(transfer_addr) => write!(f, "Key::Transfer({})", transfer_addr), - Key::DeployInfo(addr) => write!( - f, - "Key::DeployInfo({})", - base16::encode_lower(addr.as_bytes()) - ), - Key::EraInfo(era_id) => write!(f, "Key::EraInfo({})", era_id), - Key::Balance(uref_addr) => { - write!(f, "Key::Balance({})", base16::encode_lower(uref_addr)) - } - Key::Bid(account_hash) => write!(f, "Key::Bid({})", account_hash), - Key::Withdraw(account_hash) => write!(f, "Key::Withdraw({})", account_hash), - Key::Dictionary(addr) => { - write!(f, "Key::Dictionary({})", base16::encode_lower(addr)) - } - Key::SystemContractRegistry => write!( - f, - "Key::SystemContractRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ), - Key::EraSummary => write!( - f, - "Key::EraSummary({})", - base16::encode_lower(&PADDING_BYTES), - ), - Key::Unbond(account_hash) => write!(f, "Key::Unbond({})", account_hash), - Key::ChainspecRegistry => write!( - f, - "Key::ChainspecRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ), - Key::ChecksumRegistry => { - write!( - f, - "Key::ChecksumRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - } - } - } -} - -impl Debug for Key { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", self) - } -} - -impl Tagged for Key { - fn tag(&self) -> KeyTag { - match self { - Key::Account(_) => KeyTag::Account, - Key::Hash(_) => KeyTag::Hash, - Key::URef(_) => KeyTag::URef, - Key::Transfer(_) => KeyTag::Transfer, - Key::DeployInfo(_) => KeyTag::DeployInfo, - Key::EraInfo(_) => KeyTag::EraInfo, - Key::Balance(_) => KeyTag::Balance, - Key::Bid(_) => KeyTag::Bid, - 
Key::Withdraw(_) => KeyTag::Withdraw, - Key::Dictionary(_) => KeyTag::Dictionary, - Key::SystemContractRegistry => KeyTag::SystemContractRegistry, - Key::EraSummary => KeyTag::EraSummary, - Key::Unbond(_) => KeyTag::Unbond, - Key::ChainspecRegistry => KeyTag::ChainspecRegistry, - Key::ChecksumRegistry => KeyTag::ChecksumRegistry, - } - } -} - -impl Tagged for Key { - fn tag(&self) -> u8 { - let key_tag: KeyTag = self.tag(); - key_tag as u8 - } -} - -impl From for Key { - fn from(uref: URef) -> Key { - Key::URef(uref) - } -} - -impl From for Key { - fn from(account_hash: AccountHash) -> Key { - Key::Account(account_hash) - } -} - -impl From for Key { - fn from(transfer_addr: TransferAddr) -> Key { - Key::Transfer(transfer_addr) - } -} - -impl From for Key { - fn from(contract_hash: ContractHash) -> Key { - Key::Hash(contract_hash.value()) - } -} - -impl From for Key { - fn from(wasm_hash: ContractWasmHash) -> Key { - Key::Hash(wasm_hash.value()) - } -} - -impl From for Key { - fn from(package_hash: ContractPackageHash) -> Key { - Key::Hash(package_hash.value()) - } -} - -impl ToBytes for Key { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - Key::Account(account_hash) => { - KEY_ID_SERIALIZED_LENGTH + account_hash.serialized_length() - } - Key::Hash(_) => KEY_HASH_SERIALIZED_LENGTH, - Key::URef(_) => KEY_UREF_SERIALIZED_LENGTH, - Key::Transfer(_) => KEY_TRANSFER_SERIALIZED_LENGTH, - Key::DeployInfo(_) => KEY_DEPLOY_INFO_SERIALIZED_LENGTH, - Key::EraInfo(_) => KEY_ERA_INFO_SERIALIZED_LENGTH, - Key::Balance(_) => KEY_BALANCE_SERIALIZED_LENGTH, - Key::Bid(_) => KEY_BID_SERIALIZED_LENGTH, - Key::Withdraw(_) => KEY_WITHDRAW_SERIALIZED_LENGTH, - Key::Dictionary(_) => KEY_DICTIONARY_SERIALIZED_LENGTH, - Key::SystemContractRegistry => KEY_SYSTEM_CONTRACT_REGISTRY_SERIALIZED_LENGTH, - Key::EraSummary => KEY_ERA_SUMMARY_SERIALIZED_LENGTH, - Key::Unbond(_) => KEY_UNBOND_SERIALIZED_LENGTH, - Key::ChainspecRegistry => KEY_CHAINSPEC_REGISTRY_SERIALIZED_LENGTH, - Key::ChecksumRegistry => KEY_CHECKSUM_REGISTRY_SERIALIZED_LENGTH, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.push(self.tag()); - match self { - Key::Account(account_hash) => account_hash.write_bytes(writer), - Key::Hash(hash) => hash.write_bytes(writer), - Key::URef(uref) => uref.write_bytes(writer), - Key::Transfer(addr) => addr.write_bytes(writer), - Key::DeployInfo(deploy_hash) => deploy_hash.write_bytes(writer), - Key::EraInfo(era_id) => era_id.write_bytes(writer), - Key::Balance(uref_addr) => uref_addr.write_bytes(writer), - Key::Bid(account_hash) => account_hash.write_bytes(writer), - Key::Withdraw(account_hash) => account_hash.write_bytes(writer), - Key::Dictionary(addr) => addr.write_bytes(writer), - Key::Unbond(account_hash) => account_hash.write_bytes(writer), - Key::SystemContractRegistry - | Key::EraSummary - | Key::ChainspecRegistry - | Key::ChecksumRegistry => PADDING_BYTES.write_bytes(writer), - } - } -} - -impl FromBytes for Key { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == KeyTag::Account as u8 => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Account(account_hash), rem)) - } - tag if tag == KeyTag::Hash as u8 => { - let (hash, rem) = HashAddr::from_bytes(remainder)?; - Ok((Key::Hash(hash), rem)) - 
} - tag if tag == KeyTag::URef as u8 => { - let (uref, rem) = URef::from_bytes(remainder)?; - Ok((Key::URef(uref), rem)) - } - tag if tag == KeyTag::Transfer as u8 => { - let (transfer_addr, rem) = TransferAddr::from_bytes(remainder)?; - Ok((Key::Transfer(transfer_addr), rem)) - } - tag if tag == KeyTag::DeployInfo as u8 => { - let (deploy_hash, rem) = DeployHash::from_bytes(remainder)?; - Ok((Key::DeployInfo(deploy_hash), rem)) - } - tag if tag == KeyTag::EraInfo as u8 => { - let (era_id, rem) = EraId::from_bytes(remainder)?; - Ok((Key::EraInfo(era_id), rem)) - } - tag if tag == KeyTag::Balance as u8 => { - let (uref_addr, rem) = URefAddr::from_bytes(remainder)?; - Ok((Key::Balance(uref_addr), rem)) - } - tag if tag == KeyTag::Bid as u8 => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Bid(account_hash), rem)) - } - tag if tag == KeyTag::Withdraw as u8 => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Withdraw(account_hash), rem)) - } - tag if tag == KeyTag::Dictionary as u8 => { - let (addr, rem) = DictionaryAddr::from_bytes(remainder)?; - Ok((Key::Dictionary(addr), rem)) - } - tag if tag == KeyTag::SystemContractRegistry as u8 => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::SystemContractRegistry, rem)) - } - tag if tag == KeyTag::EraSummary as u8 => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::EraSummary, rem)) - } - tag if tag == KeyTag::Unbond as u8 => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Unbond(account_hash), rem)) - } - tag if tag == KeyTag::ChainspecRegistry as u8 => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::ChainspecRegistry, rem)) - } - tag if tag == KeyTag::ChecksumRegistry as u8 => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::ChecksumRegistry, rem)) - } - _ => Err(Error::Formatting), - } - } -} - -#[allow(dead_code)] -fn please_add_to_distribution_impl(key: Key) { - // If you've been forced to come here, you likely need to add your variant to the - // `Distribution` impl for `Key`. 
- match key { - Key::Account(_) => unimplemented!(), - Key::Hash(_) => unimplemented!(), - Key::URef(_) => unimplemented!(), - Key::Transfer(_) => unimplemented!(), - Key::DeployInfo(_) => unimplemented!(), - Key::EraInfo(_) => unimplemented!(), - Key::Balance(_) => unimplemented!(), - Key::Bid(_) => unimplemented!(), - Key::Withdraw(_) => unimplemented!(), - Key::Dictionary(_) => unimplemented!(), - Key::SystemContractRegistry => unimplemented!(), - Key::EraSummary => unimplemented!(), - Key::Unbond(_) => unimplemented!(), - Key::ChainspecRegistry => unimplemented!(), - Key::ChecksumRegistry => unimplemented!(), - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Key { - match rng.gen_range(0..=14) { - 0 => Key::Account(rng.gen()), - 1 => Key::Hash(rng.gen()), - 2 => Key::URef(rng.gen()), - 3 => Key::Transfer(rng.gen()), - 4 => Key::DeployInfo(rng.gen()), - 5 => Key::EraInfo(rng.gen()), - 6 => Key::Balance(rng.gen()), - 7 => Key::Bid(rng.gen()), - 8 => Key::Withdraw(rng.gen()), - 9 => Key::Dictionary(rng.gen()), - 10 => Key::SystemContractRegistry, - 11 => Key::EraSummary, - 12 => Key::Unbond(rng.gen()), - 13 => Key::ChainspecRegistry, - 14 => Key::ChecksumRegistry, - _ => unreachable!(), - } - } -} - -mod serde_helpers { - use super::*; - - #[derive(Serialize, Deserialize)] - pub(super) enum HumanReadable { - Account(String), - Hash(String), - URef(String), - Transfer(String), - DeployInfo(String), - EraInfo(String), - Balance(String), - Bid(String), - Withdraw(String), - Dictionary(String), - SystemContractRegistry(String), - EraSummary(String), - Unbond(String), - ChainspecRegistry(String), - ChecksumRegistry(String), - } - - impl From<&Key> for HumanReadable { - fn from(key: &Key) -> Self { - let formatted_string = key.to_formatted_string(); - match key { - Key::Account(_) => HumanReadable::Account(formatted_string), - Key::Hash(_) => HumanReadable::Hash(formatted_string), - Key::URef(_) => HumanReadable::URef(formatted_string), - Key::Transfer(_) => HumanReadable::Transfer(formatted_string), - Key::DeployInfo(_) => HumanReadable::DeployInfo(formatted_string), - Key::EraInfo(_) => HumanReadable::EraInfo(formatted_string), - Key::Balance(_) => HumanReadable::Balance(formatted_string), - Key::Bid(_) => HumanReadable::Bid(formatted_string), - Key::Withdraw(_) => HumanReadable::Withdraw(formatted_string), - Key::Dictionary(_) => HumanReadable::Dictionary(formatted_string), - Key::SystemContractRegistry => { - HumanReadable::SystemContractRegistry(formatted_string) - } - Key::EraSummary => HumanReadable::EraSummary(formatted_string), - Key::Unbond(_) => HumanReadable::Unbond(formatted_string), - Key::ChainspecRegistry => HumanReadable::ChainspecRegistry(formatted_string), - Key::ChecksumRegistry => HumanReadable::ChecksumRegistry(formatted_string), - } - } - } - - impl TryFrom for Key { - type Error = FromStrError; - - fn try_from(helper: HumanReadable) -> Result { - match helper { - HumanReadable::Account(formatted_string) - | HumanReadable::Hash(formatted_string) - | HumanReadable::URef(formatted_string) - | HumanReadable::Transfer(formatted_string) - | HumanReadable::DeployInfo(formatted_string) - | HumanReadable::EraInfo(formatted_string) - | HumanReadable::Balance(formatted_string) - | HumanReadable::Bid(formatted_string) - | HumanReadable::Withdraw(formatted_string) - | HumanReadable::Dictionary(formatted_string) - | HumanReadable::SystemContractRegistry(formatted_string) - | HumanReadable::EraSummary(formatted_string) - | 
HumanReadable::Unbond(formatted_string) - | HumanReadable::ChainspecRegistry(formatted_string) - | HumanReadable::ChecksumRegistry(formatted_string) => { - Key::from_formatted_str(&formatted_string) - } - } - } - } - - #[derive(Serialize)] - pub(super) enum BinarySerHelper<'a> { - Account(&'a AccountHash), - Hash(&'a HashAddr), - URef(&'a URef), - Transfer(&'a TransferAddr), - DeployInfo(&'a DeployHash), - EraInfo(&'a EraId), - Balance(&'a URefAddr), - Bid(&'a AccountHash), - Withdraw(&'a AccountHash), - Dictionary(&'a HashAddr), - SystemContractRegistry, - EraSummary, - Unbond(&'a AccountHash), - ChainspecRegistry, - ChecksumRegistry, - } - - impl<'a> From<&'a Key> for BinarySerHelper<'a> { - fn from(key: &'a Key) -> Self { - match key { - Key::Account(account_hash) => BinarySerHelper::Account(account_hash), - Key::Hash(hash_addr) => BinarySerHelper::Hash(hash_addr), - Key::URef(uref) => BinarySerHelper::URef(uref), - Key::Transfer(transfer_addr) => BinarySerHelper::Transfer(transfer_addr), - Key::DeployInfo(deploy_hash) => BinarySerHelper::DeployInfo(deploy_hash), - Key::EraInfo(era_id) => BinarySerHelper::EraInfo(era_id), - Key::Balance(uref_addr) => BinarySerHelper::Balance(uref_addr), - Key::Bid(account_hash) => BinarySerHelper::Bid(account_hash), - Key::Withdraw(account_hash) => BinarySerHelper::Withdraw(account_hash), - Key::Dictionary(addr) => BinarySerHelper::Dictionary(addr), - Key::SystemContractRegistry => BinarySerHelper::SystemContractRegistry, - Key::EraSummary => BinarySerHelper::EraSummary, - Key::Unbond(account_hash) => BinarySerHelper::Unbond(account_hash), - Key::ChainspecRegistry => BinarySerHelper::ChainspecRegistry, - Key::ChecksumRegistry => BinarySerHelper::ChecksumRegistry, - } - } - } - - #[derive(Deserialize)] - pub(super) enum BinaryDeserHelper { - Account(AccountHash), - Hash(HashAddr), - URef(URef), - Transfer(TransferAddr), - DeployInfo(DeployHash), - EraInfo(EraId), - Balance(URefAddr), - Bid(AccountHash), - Withdraw(AccountHash), - Dictionary(DictionaryAddr), - SystemContractRegistry, - EraSummary, - Unbond(AccountHash), - ChainspecRegistry, - ChecksumRegistry, - } - - impl From for Key { - fn from(helper: BinaryDeserHelper) -> Self { - match helper { - BinaryDeserHelper::Account(account_hash) => Key::Account(account_hash), - BinaryDeserHelper::Hash(hash_addr) => Key::Hash(hash_addr), - BinaryDeserHelper::URef(uref) => Key::URef(uref), - BinaryDeserHelper::Transfer(transfer_addr) => Key::Transfer(transfer_addr), - BinaryDeserHelper::DeployInfo(deploy_hash) => Key::DeployInfo(deploy_hash), - BinaryDeserHelper::EraInfo(era_id) => Key::EraInfo(era_id), - BinaryDeserHelper::Balance(uref_addr) => Key::Balance(uref_addr), - BinaryDeserHelper::Bid(account_hash) => Key::Bid(account_hash), - BinaryDeserHelper::Withdraw(account_hash) => Key::Withdraw(account_hash), - BinaryDeserHelper::Dictionary(addr) => Key::Dictionary(addr), - BinaryDeserHelper::SystemContractRegistry => Key::SystemContractRegistry, - BinaryDeserHelper::EraSummary => Key::EraSummary, - BinaryDeserHelper::Unbond(account_hash) => Key::Unbond(account_hash), - BinaryDeserHelper::ChainspecRegistry => Key::ChainspecRegistry, - BinaryDeserHelper::ChecksumRegistry => Key::ChecksumRegistry, - } - } - } -} - -impl Serialize for Key { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - serde_helpers::HumanReadable::from(self).serialize(serializer) - } else { - serde_helpers::BinarySerHelper::from(self).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for 
Key { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let human_readable = serde_helpers::HumanReadable::deserialize(deserializer)?; - Key::try_from(human_readable).map_err(SerdeError::custom) - } else { - let binary_helper = serde_helpers::BinaryDeserHelper::deserialize(deserializer)?; - Ok(Key::from(binary_helper)) - } - } -} - -#[cfg(test)] -mod tests { - use std::string::ToString; - - use serde_json::json; - - use super::*; - use crate::{ - account::ACCOUNT_HASH_FORMATTED_STRING_PREFIX, - bytesrepr::{Error, FromBytes}, - transfer::TRANSFER_ADDR_FORMATTED_STRING_PREFIX, - uref::UREF_FORMATTED_STRING_PREFIX, - AccessRights, URef, - }; - - const ACCOUNT_KEY: Key = Key::Account(AccountHash::new([42; 32])); - const HASH_KEY: Key = Key::Hash([42; 32]); - const UREF_KEY: Key = Key::URef(URef::new([42; 32], AccessRights::READ)); - const TRANSFER_KEY: Key = Key::Transfer(TransferAddr::new([42; 32])); - const DEPLOY_INFO_KEY: Key = Key::DeployInfo(DeployHash::new([42; 32])); - const ERA_INFO_KEY: Key = Key::EraInfo(EraId::new(42)); - const BALANCE_KEY: Key = Key::Balance([42; 32]); - const BID_KEY: Key = Key::Bid(AccountHash::new([42; 32])); - const WITHDRAW_KEY: Key = Key::Withdraw(AccountHash::new([42; 32])); - const DICTIONARY_KEY: Key = Key::Dictionary([42; 32]); - const SYSTEM_CONTRACT_REGISTRY_KEY: Key = Key::SystemContractRegistry; - const ERA_SUMMARY_KEY: Key = Key::EraSummary; - const UNBOND_KEY: Key = Key::Unbond(AccountHash::new([42; 32])); - const CHAINSPEC_REGISTRY_KEY: Key = Key::ChainspecRegistry; - const CHECKSUM_REGISTRY_KEY: Key = Key::ChecksumRegistry; - const KEYS: &[Key] = &[ - ACCOUNT_KEY, - HASH_KEY, - UREF_KEY, - TRANSFER_KEY, - DEPLOY_INFO_KEY, - ERA_INFO_KEY, - BALANCE_KEY, - BID_KEY, - WITHDRAW_KEY, - DICTIONARY_KEY, - SYSTEM_CONTRACT_REGISTRY_KEY, - ERA_SUMMARY_KEY, - UNBOND_KEY, - CHAINSPEC_REGISTRY_KEY, - CHECKSUM_REGISTRY_KEY, - ]; - const HEX_STRING: &str = "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a"; - - fn test_readable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_readable(), is_true) - } - - #[test] - fn test_is_readable() { - test_readable(AccessRights::READ, true); - test_readable(AccessRights::READ_ADD, true); - test_readable(AccessRights::READ_WRITE, true); - test_readable(AccessRights::READ_ADD_WRITE, true); - test_readable(AccessRights::ADD, false); - test_readable(AccessRights::ADD_WRITE, false); - test_readable(AccessRights::WRITE, false); - } - - fn test_writable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_writeable(), is_true) - } - - #[test] - fn test_is_writable() { - test_writable(AccessRights::WRITE, true); - test_writable(AccessRights::READ_WRITE, true); - test_writable(AccessRights::ADD_WRITE, true); - test_writable(AccessRights::READ, false); - test_writable(AccessRights::ADD, false); - test_writable(AccessRights::READ_ADD, false); - test_writable(AccessRights::READ_ADD_WRITE, true); - } - - fn test_addable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_addable(), is_true) - } - - #[test] - fn test_is_addable() { - test_addable(AccessRights::ADD, true); - test_addable(AccessRights::READ_ADD, true); - test_addable(AccessRights::READ_WRITE, false); - test_addable(AccessRights::ADD_WRITE, true); - test_addable(AccessRights::READ, false); - test_addable(AccessRights::WRITE, false); - test_addable(AccessRights::READ_ADD_WRITE, true); - } - - #[test] - fn should_display_key() { - assert_eq!( - format!("{}", ACCOUNT_KEY), - 
format!("Key::Account({})", HEX_STRING) - ); - assert_eq!( - format!("{}", HASH_KEY), - format!("Key::Hash({})", HEX_STRING) - ); - assert_eq!( - format!("{}", UREF_KEY), - format!("Key::URef({}, READ)", HEX_STRING) - ); - assert_eq!( - format!("{}", TRANSFER_KEY), - format!("Key::Transfer({})", HEX_STRING) - ); - assert_eq!( - format!("{}", DEPLOY_INFO_KEY), - format!("Key::DeployInfo({})", HEX_STRING) - ); - assert_eq!( - format!("{}", ERA_INFO_KEY), - "Key::EraInfo(era 42)".to_string() - ); - assert_eq!( - format!("{}", BALANCE_KEY), - format!("Key::Balance({})", HEX_STRING) - ); - assert_eq!(format!("{}", BID_KEY), format!("Key::Bid({})", HEX_STRING)); - assert_eq!( - format!("{}", WITHDRAW_KEY), - format!("Key::Withdraw({})", HEX_STRING) - ); - assert_eq!( - format!("{}", DICTIONARY_KEY), - format!("Key::Dictionary({})", HEX_STRING) - ); - assert_eq!( - format!("{}", SYSTEM_CONTRACT_REGISTRY_KEY), - format!( - "Key::SystemContractRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - ); - assert_eq!( - format!("{}", ERA_SUMMARY_KEY), - format!("Key::EraSummary({})", base16::encode_lower(&PADDING_BYTES)) - ); - assert_eq!( - format!("{}", UNBOND_KEY), - format!("Key::Unbond({})", HEX_STRING) - ); - assert_eq!( - format!("{}", CHAINSPEC_REGISTRY_KEY), - format!( - "Key::ChainspecRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - ); - assert_eq!( - format!("{}", CHECKSUM_REGISTRY_KEY), - format!( - "Key::ChecksumRegistry({})", - base16::encode_lower(&PADDING_BYTES), - ) - ); - } - - #[test] - fn abuse_vec_key() { - // Prefix is 2^32-1 = shouldn't allocate that much - let bytes: Vec = vec![255, 255, 255, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - let res: Result<(Vec, &[u8]), _> = FromBytes::from_bytes(&bytes); - #[cfg(target_os = "linux")] - assert_eq!(res.expect_err("should fail"), Error::OutOfMemory); - #[cfg(target_os = "macos")] - assert_eq!(res.expect_err("should fail"), Error::EarlyEndOfStream); - } - - #[test] - fn check_key_account_getters() { - let account = [42; 32]; - let account_hash = AccountHash::new(account); - let key1 = Key::Account(account_hash); - assert_eq!(key1.into_account(), Some(account_hash)); - assert!(key1.into_hash().is_none()); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_key_hash_getters() { - let hash = [42; KEY_HASH_LENGTH]; - let key1 = Key::Hash(hash); - assert!(key1.into_account().is_none()); - assert_eq!(key1.into_hash(), Some(hash)); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_key_uref_getters() { - let uref = URef::new([42; 32], AccessRights::READ_ADD_WRITE); - let key1 = Key::URef(uref); - assert!(key1.into_account().is_none()); - assert!(key1.into_hash().is_none()); - assert_eq!(key1.as_uref(), Some(&uref)); - } - - #[test] - fn key_max_serialized_length() { - let mut got_max = false; - for key in KEYS { - assert!(key.serialized_length() <= Key::max_serialized_length()); - if key.serialized_length() == Key::max_serialized_length() { - got_max = true; - } - } - assert!( - got_max, - "None of the Key variants has a serialized_length equal to \ - Key::max_serialized_length(), so Key::max_serialized_length() should be reduced" - ); - } - - #[test] - fn should_parse_key_from_str() { - for key in KEYS { - let string = key.to_formatted_string(); - let parsed_key = Key::from_formatted_str(&string).unwrap(); - assert_eq!(parsed_key, *key, "{string} (key = {key:?})"); - } - } - - #[test] - fn should_fail_to_parse_key_from_str() { - assert!( - Key::from_formatted_str(ACCOUNT_HASH_FORMATTED_STRING_PREFIX) - 
.unwrap_err() - .to_string() - .starts_with("account-key from string error: ") - ); - assert!(Key::from_formatted_str(HASH_PREFIX) - .unwrap_err() - .to_string() - .starts_with("hash-key from string error: ")); - assert!(Key::from_formatted_str(UREF_FORMATTED_STRING_PREFIX) - .unwrap_err() - .to_string() - .starts_with("uref-key from string error: ")); - assert!( - Key::from_formatted_str(TRANSFER_ADDR_FORMATTED_STRING_PREFIX) - .unwrap_err() - .to_string() - .starts_with("transfer-key from string error: ") - ); - assert!(Key::from_formatted_str(DEPLOY_INFO_PREFIX) - .unwrap_err() - .to_string() - .starts_with("deploy-info-key from string error: ")); - assert!(Key::from_formatted_str(ERA_INFO_PREFIX) - .unwrap_err() - .to_string() - .starts_with("era-info-key from string error: ")); - assert!(Key::from_formatted_str(BALANCE_PREFIX) - .unwrap_err() - .to_string() - .starts_with("balance-key from string error: ")); - assert!(Key::from_formatted_str(BID_PREFIX) - .unwrap_err() - .to_string() - .starts_with("bid-key from string error: ")); - assert!(Key::from_formatted_str(WITHDRAW_PREFIX) - .unwrap_err() - .to_string() - .starts_with("withdraw-key from string error: ")); - assert!(Key::from_formatted_str(DICTIONARY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("dictionary-key from string error: ")); - assert!(Key::from_formatted_str(SYSTEM_CONTRACT_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("system-contract-registry-key from string error: ")); - assert!(Key::from_formatted_str(ERA_SUMMARY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("era-summary-key from string error")); - assert!(Key::from_formatted_str(UNBOND_PREFIX) - .unwrap_err() - .to_string() - .starts_with("unbond-key from string error: ")); - assert!(Key::from_formatted_str(CHAINSPEC_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("chainspec-registry-key from string error: ")); - assert!(Key::from_formatted_str(CHECKSUM_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("checksum-registry-key from string error: ")); - let invalid_prefix = "a-0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(invalid_prefix) - .unwrap_err() - .to_string(), - "unknown prefix for key" - ); - - let missing_hyphen_prefix = - "hash0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(missing_hyphen_prefix) - .unwrap_err() - .to_string(), - "unknown prefix for key" - ); - - let no_prefix = "0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(no_prefix).unwrap_err().to_string(), - "unknown prefix for key" - ); - } - - #[test] - fn key_to_json() { - let expected_json = &[ - json!({ "Account": format!("account-hash-{}", HEX_STRING) }), - json!({ "Hash": format!("hash-{}", HEX_STRING) }), - json!({ "URef": format!("uref-{}-001", HEX_STRING) }), - json!({ "Transfer": format!("transfer-{}", HEX_STRING) }), - json!({ "DeployInfo": format!("deploy-{}", HEX_STRING) }), - json!({ "EraInfo": "era-42" }), - json!({ "Balance": format!("balance-{}", HEX_STRING) }), - json!({ "Bid": format!("bid-{}", HEX_STRING) }), - json!({ "Withdraw": format!("withdraw-{}", HEX_STRING) }), - json!({ "Dictionary": format!("dictionary-{}", HEX_STRING) }), - json!({ - "SystemContractRegistry": - format!( - "system-contract-registry-{}", - base16::encode_lower(&PADDING_BYTES) - ) - }), - json!({ - "EraSummary": format!("era-summary-{}", 
base16::encode_lower(&PADDING_BYTES)) - }), - json!({ "Unbond": format!("unbond-{}", HEX_STRING) }), - json!({ - "ChainspecRegistry": - format!( - "chainspec-registry-{}", - base16::encode_lower(&PADDING_BYTES) - ) - }), - json!({ - "ChecksumRegistry": - format!("checksum-registry-{}", base16::encode_lower(&PADDING_BYTES)) - }), - ]; - - assert_eq!( - KEYS.len(), - expected_json.len(), - "There should be exactly one expected JSON string per test key" - ); - - for (key, expected_json_key) in KEYS.iter().zip(expected_json.iter()) { - assert_eq!(serde_json::to_value(key).unwrap(), *expected_json_key); - } - } - - #[test] - fn serialization_roundtrip_bincode() { - for key in KEYS { - let encoded = bincode::serialize(key).unwrap(); - let decoded = bincode::deserialize(&encoded).unwrap(); - assert_eq!(key, &decoded); - } - } - - #[test] - fn serialization_roundtrip_json() { - let round_trip = |key: &Key| { - let encoded = serde_json::to_value(key).unwrap(); - let decoded = serde_json::from_value(encoded).unwrap(); - assert_eq!(key, &decoded); - }; - - for key in KEYS { - round_trip(key); - } - - let zeros = [0; BLAKE2B_DIGEST_LENGTH]; - - round_trip(&Key::Account(AccountHash::new(zeros))); - round_trip(&Key::Hash(zeros)); - round_trip(&Key::URef(URef::new(zeros, AccessRights::READ))); - round_trip(&Key::Transfer(TransferAddr::new(zeros))); - round_trip(&Key::DeployInfo(DeployHash::new(zeros))); - round_trip(&Key::EraInfo(EraId::from(0))); - round_trip(&Key::Balance(URef::new(zeros, AccessRights::READ).addr())); - round_trip(&Key::Bid(AccountHash::new(zeros))); - round_trip(&Key::Withdraw(AccountHash::new(zeros))); - round_trip(&Key::Dictionary(zeros)); - round_trip(&Key::SystemContractRegistry); - round_trip(&Key::EraSummary); - round_trip(&Key::Unbond(AccountHash::new(zeros))); - round_trip(&Key::ChainspecRegistry); - round_trip(&Key::ChecksumRegistry); - } -} diff --git a/casper_types/src/lib.rs b/casper_types/src/lib.rs deleted file mode 100644 index c2aeac55..00000000 --- a/casper_types/src/lib.rs +++ /dev/null @@ -1,113 +0,0 @@ -//! Types used to allow creation of Wasm contracts and tests for use on the Casper Platform. 
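// Editorial sketch (not part of the removed sources): a minimal example of the
// formatted-string round trip exercised by the `should_parse_key_from_str` test
// above. It assumes the crate layout shown in this diff: `Key` re-exported at the
// casper-types crate root and `AccountHash` in the public `account` module.
use casper_types::{account::AccountHash, Key};

fn main() {
    let key = Key::Account(AccountHash::new([42; 32]));
    // Produces a prefixed, hex-encoded form such as "account-hash-2a2a...".
    let formatted = key.to_formatted_string();
    // Parsing the formatted string yields the original key again.
    let parsed = Key::from_formatted_str(&formatted).expect("should parse");
    assert_eq!(parsed, key);
}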
- -#![cfg_attr( - not(any( - feature = "json-schema", - feature = "datasize", - feature = "std", - feature = "testing", - test, - )), - no_std -)] -#![doc(html_root_url = "https://docs.rs/casper-types/4.0.1")] -#![doc( - html_favicon_url = "https://raw.githubusercontent.com/casper-network/casper-node/blob/dev/images/Casper_Logo_Favicon_48.png", - html_logo_url = "https://raw.githubusercontent.com/casper-network/casper-node/blob/dev/images/Casper_Logo_Favicon.png", - test(attr(forbid(warnings))) -)] -#![warn(missing_docs)] - -#[cfg_attr(not(test), macro_use)] -extern crate alloc; - -mod access_rights; -pub mod account; -pub mod api_error; -mod block_time; -pub mod bytesrepr; -pub mod checksummed_hex; -mod cl_type; -mod cl_value; -mod contract_wasm; -pub mod contracts; -pub mod crypto; -mod deploy_info; -mod era_id; -mod execution_result; -#[cfg(any(feature = "std", test))] -pub mod file_utils; -mod gas; -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens; -mod json_pretty_printer; -mod key; -mod motes; -mod named_key; -mod phase; -mod protocol_version; -pub mod runtime_args; -mod semver; -mod stored_value; -pub mod system; -mod tagged; -#[cfg(any(feature = "testing", test))] -pub mod testing; -mod timestamp; -mod transfer; -mod transfer_result; -mod uint; -mod uref; - -pub use access_rights::{ - AccessRights, ContextAccessRights, GrantedAccess, ACCESS_RIGHTS_SERIALIZED_LENGTH, -}; -#[doc(inline)] -pub use api_error::ApiError; -pub use block_time::{BlockTime, BLOCKTIME_SERIALIZED_LENGTH}; -pub use cl_type::{named_key_type, CLType, CLTyped}; -pub use cl_value::{CLTypeMismatch, CLValue, CLValueError}; -pub use contract_wasm::{ContractWasm, ContractWasmHash}; -#[doc(inline)] -pub use contracts::{ - Contract, ContractHash, ContractPackage, ContractPackageHash, ContractVersion, - ContractVersionKey, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Group, - Parameter, -}; -pub use crypto::*; -pub use deploy_info::DeployInfo; -pub use execution_result::{ - ExecutionEffect, ExecutionResult, OpKind, Operation, Transform, TransformEntry, -}; -pub use gas::Gas; -pub use json_pretty_printer::json_pretty_print; -#[doc(inline)] -pub use key::{ - DictionaryAddr, FromStrError as KeyFromStrError, HashAddr, Key, KeyTag, BLAKE2B_DIGEST_LENGTH, - DICTIONARY_ITEM_KEY_MAX_LENGTH, KEY_DICTIONARY_LENGTH, KEY_HASH_LENGTH, -}; -pub use motes::Motes; -pub use named_key::NamedKey; -pub use phase::{Phase, PHASE_SERIALIZED_LENGTH}; -pub use protocol_version::{ProtocolVersion, VersionCheckResult}; -#[doc(inline)] -pub use runtime_args::{NamedArg, RuntimeArgs}; -pub use semver::{ParseSemVerError, SemVer, SEM_VER_SERIALIZED_LENGTH}; -pub use stored_value::{StoredValue, TypeMismatch as StoredValueTypeMismatch}; -pub use tagged::Tagged; -#[cfg(any(feature = "std", test))] -pub use timestamp::serde_option_time_diff; -pub use timestamp::{TimeDiff, Timestamp}; -pub use transfer::{ - DeployHash, FromStrError as TransferFromStrError, Transfer, TransferAddr, DEPLOY_HASH_LENGTH, - TRANSFER_ADDR_LENGTH, -}; -pub use transfer_result::{TransferResult, TransferredTo}; -pub use uref::{ - FromStrError as URefFromStrError, URef, URefAddr, UREF_ADDR_LENGTH, UREF_SERIALIZED_LENGTH, -}; - -pub use crate::{ - era_id::EraId, - uint::{UIntParseError, U128, U256, U512}, -}; diff --git a/casper_types/src/motes.rs b/casper_types/src/motes.rs deleted file mode 100644 index 8008a81c..00000000 --- a/casper_types/src/motes.rs +++ /dev/null @@ -1,248 +0,0 @@ -//! The `motes` module is used for working with Motes. 
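// Editorial sketch (not part of the removed sources): how the Motes API deleted
// below is typically used, based on the unit tests in this file. It assumes the
// crate-root re-exports of Gas, Motes and U512 shown in lib.rs above.
use casper_types::{Gas, Motes, U512};

fn main() {
    // Checked arithmetic returns None instead of panicking on overflow.
    let a = Motes::new(U512::from(1));
    let b = Motes::new(U512::from(2));
    assert_eq!(a.checked_add(b), Some(Motes::new(U512::from(3))));

    // Gas converts to Motes by multiplying by a conversion rate;
    // an overflowing multiplication yields None.
    let gas = Gas::new(U512::from(100));
    assert_eq!(Motes::from_gas(gas, 10), Some(Motes::new(U512::from(1000))));
    assert!(Motes::from_gas(Gas::new(U512::MAX), 10).is_none());
}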
- -use alloc::vec::Vec; -use core::{ - fmt, - iter::Sum, - ops::{Add, Div, Mul, Sub}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Gas, U512, -}; - -/// A struct representing a number of `Motes`. -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Motes(U512); - -impl Motes { - /// Constructs a new `Motes`. - pub fn new(value: U512) -> Motes { - Motes(value) - } - - /// Checked integer addition. Computes `self + rhs`, returning `None` if overflow occurred. - pub fn checked_add(&self, rhs: Self) -> Option<Self> { - self.0.checked_add(rhs.value()).map(Self::new) - } - - /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred. - pub fn checked_sub(&self, rhs: Self) -> Option<Self> { - self.0.checked_sub(rhs.value()).map(Self::new) - } - - /// Returns the inner `U512` value. - pub fn value(&self) -> U512 { - self.0 - } - - /// Converts the given `gas` to `Motes` by multiplying them by `conv_rate`. - /// - /// Returns `None` if an arithmetic overflow occurred. - pub fn from_gas(gas: Gas, conv_rate: u64) -> Option<Self> { - gas.value() - .checked_mul(U512::from(conv_rate)) - .map(Self::new) - } -} - -impl fmt::Display for Motes { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.0) - } -} - -impl Add for Motes { - type Output = Motes; - - fn add(self, rhs: Self) -> Self::Output { - let val = self.value() + rhs.value(); - Motes::new(val) - } -} - -impl Sub for Motes { - type Output = Motes; - - fn sub(self, rhs: Self) -> Self::Output { - let val = self.value() - rhs.value(); - Motes::new(val) - } -} - -impl Div for Motes { - type Output = Motes; - - fn div(self, rhs: Self) -> Self::Output { - let val = self.value() / rhs.value(); - Motes::new(val) - } -} - -impl Mul for Motes { - type Output = Motes; - - fn mul(self, rhs: Self) -> Self::Output { - let val = self.value() * rhs.value(); - Motes::new(val) - } -} - -impl Zero for Motes { - fn zero() -> Self { - Motes::new(U512::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl Sum for Motes { - fn sum<I: Iterator<Item = Motes>>(iter: I) -> Self { - iter.fold(Motes::zero(), Add::add) - } -} - -impl ToBytes for Motes { - fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Motes { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, remainder) = FromBytes::from_bytes(bytes)?; - Ok((Motes::new(value), remainder)) - } -} - -#[cfg(test)] -mod tests { - use crate::U512; - - use crate::{Gas, Motes}; - - #[test] - fn should_be_able_to_get_instance_of_motes() { - let initial_value = 1; - let motes = Motes::new(U512::from(initial_value)); - assert_eq!( - initial_value, - motes.value().as_u64(), - "should have equal value" - ) - } - - #[test] - fn should_be_able_to_compare_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - assert_eq!(left_motes, right_motes, "should be equal"); - let right_motes = Motes::new(U512::from(2)); - assert_ne!(left_motes, right_motes, "should not be equal") - } - - #[test] - fn should_be_able_to_add_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - let
expected_motes = Motes::new(U512::from(2)); - assert_eq!( - (left_motes + right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_subtract_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - let expected_motes = Motes::new(U512::from(0)); - assert_eq!( - (left_motes - right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_multiply_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(100)); - let right_motes = Motes::new(U512::from(10)); - let expected_motes = Motes::new(U512::from(1000)); - assert_eq!( - (left_motes * right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_divide_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1000)); - let right_motes = Motes::new(U512::from(100)); - let expected_motes = Motes::new(U512::from(10)); - assert_eq!( - (left_motes / right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_convert_from_motes() { - let gas = Gas::new(U512::from(100)); - let motes = Motes::from_gas(gas, 10).expect("should have value"); - let expected_motes = Motes::new(U512::from(1000)); - assert_eq!(motes, expected_motes, "should be equal") - } - - #[test] - fn should_be_able_to_default() { - let motes = Motes::default(); - let expected_motes = Motes::new(U512::from(0)); - assert_eq!(motes, expected_motes, "should be equal") - } - - #[test] - fn should_be_able_to_compare_relative_value() { - let left_motes = Motes::new(U512::from(100)); - let right_motes = Motes::new(U512::from(10)); - assert!(left_motes > right_motes, "should be gt"); - let right_motes = Motes::new(U512::from(100)); - assert!(left_motes >= right_motes, "should be gte"); - assert!(left_motes <= right_motes, "should be lte"); - let left_motes = Motes::new(U512::from(10)); - assert!(left_motes < right_motes, "should be lt"); - } - - #[test] - fn should_default() { - let left_motes = Motes::new(U512::from(0)); - let right_motes = Motes::default(); - assert_eq!(left_motes, right_motes, "should be equal"); - let u512 = U512::zero(); - assert_eq!(left_motes.value(), u512, "should be equal"); - } - - #[test] - fn should_support_checked_mul_from_gas() { - let gas = Gas::new(U512::MAX); - let conv_rate = 10; - let maybe = Motes::from_gas(gas, conv_rate); - assert!(maybe.is_none(), "should be none due to overflow"); - } -} diff --git a/casper_types/src/named_key.rs b/casper_types/src/named_key.rs deleted file mode 100644 index 29214a52..00000000 --- a/casper_types/src/named_key.rs +++ /dev/null @@ -1,46 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// A named key. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Default, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct NamedKey { - /// The name of the entry. - pub name: String, - /// The value of the entry: a casper `Key` type. 
- pub key: String, -} - -impl ToBytes for NamedKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.name.to_bytes()?); - buffer.extend(self.key.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.name.serialized_length() + self.key.serialized_length() - } -} - -impl FromBytes for NamedKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, remainder) = String::from_bytes(bytes)?; - let (key, remainder) = String::from_bytes(remainder)?; - let named_key = NamedKey { name, key }; - Ok((named_key, remainder)) - } -} diff --git a/casper_types/src/phase.rs b/casper_types/src/phase.rs deleted file mode 100644 index 35586889..00000000 --- a/casper_types/src/phase.rs +++ /dev/null @@ -1,56 +0,0 @@ -// Can be removed once https://github.com/rust-lang/rustfmt/issues/3362 is resolved. -#[rustfmt::skip] -use alloc::vec; -use alloc::vec::Vec; - -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::{FromPrimitive, ToPrimitive}; - -use crate::{ - bytesrepr::{Error, FromBytes, ToBytes}, - CLType, CLTyped, -}; - -/// The number of bytes in a serialized [`Phase`]. -pub const PHASE_SERIALIZED_LENGTH: usize = 1; - -/// The phase in which a given contract is executing. -#[derive(Debug, PartialEq, Eq, Clone, Copy, FromPrimitive, ToPrimitive)] -#[repr(u8)] -pub enum Phase { - /// Set while committing the genesis or upgrade configurations. - System = 0, - /// Set while executing the payment code of a deploy. - Payment = 1, - /// Set while executing the session code of a deploy. - Session = 2, - /// Set while finalizing payment at the end of a deploy. - FinalizePayment = 3, -} - -impl ToBytes for Phase { - fn to_bytes(&self) -> Result, Error> { - // NOTE: Assumed safe as [`Phase`] is represented as u8. - let id = self.to_u8().expect("Phase is represented as a u8"); - - Ok(vec![id]) - } - - fn serialized_length(&self) -> usize { - PHASE_SERIALIZED_LENGTH - } -} - -impl FromBytes for Phase { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (id, rest) = u8::from_bytes(bytes)?; - let phase = FromPrimitive::from_u8(id).ok_or(Error::Formatting)?; - Ok((phase, rest)) - } -} - -impl CLTyped for Phase { - fn cl_type() -> CLType { - CLType::U8 - } -} diff --git a/casper_types/src/protocol_version.rs b/casper_types/src/protocol_version.rs deleted file mode 100644 index fe889f1c..00000000 --- a/casper_types/src/protocol_version.rs +++ /dev/null @@ -1,550 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{convert::TryFrom, fmt, str::FromStr}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - bytesrepr::{Error, FromBytes, ToBytes}, - ParseSemVerError, SemVer, -}; - -/// A newtype wrapping a [`SemVer`] which represents a Casper Platform protocol version. -#[derive(Copy, Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ProtocolVersion(SemVer); - -/// The result of [`ProtocolVersion::check_next_version`]. -#[derive(Debug, PartialEq, Eq)] -pub enum VersionCheckResult { - /// Upgrade possible. - Valid { - /// Is this a major protocol version upgrade? - is_major_version: bool, - }, - /// Upgrade is invalid. 
- Invalid, -} - -impl VersionCheckResult { - /// Checks if given version result is invalid. - /// - /// Invalid means that a given version can not be followed. - pub fn is_invalid(&self) -> bool { - matches!(self, VersionCheckResult::Invalid) - } - - /// Checks if given version is a major protocol version upgrade. - pub fn is_major_version(&self) -> bool { - match self { - VersionCheckResult::Valid { is_major_version } => *is_major_version, - VersionCheckResult::Invalid => false, - } - } -} - -impl ProtocolVersion { - /// Version 1.0.0. - pub const V1_0_0: ProtocolVersion = ProtocolVersion(SemVer { - major: 1, - minor: 0, - patch: 0, - }); - - /// Constructs a new `ProtocolVersion` from `version`. - pub const fn new(version: SemVer) -> ProtocolVersion { - ProtocolVersion(version) - } - - /// Constructs a new `ProtocolVersion` from the given semver parts. - pub const fn from_parts(major: u32, minor: u32, patch: u32) -> ProtocolVersion { - let sem_ver = SemVer::new(major, minor, patch); - Self::new(sem_ver) - } - - /// Returns the inner [`SemVer`]. - pub fn value(&self) -> SemVer { - self.0 - } - - /// Checks if next version can be followed. - pub fn check_next_version(&self, next: &ProtocolVersion) -> VersionCheckResult { - // Protocol major versions should increase monotonically by 1. - let major_bumped = self.0.major.saturating_add(1); - if next.0.major < self.0.major || next.0.major > major_bumped { - return VersionCheckResult::Invalid; - } - - if next.0.major == major_bumped { - return VersionCheckResult::Valid { - is_major_version: true, - }; - } - - // Covers the equal major versions - debug_assert_eq!(next.0.major, self.0.major); - - if next.0.minor < self.0.minor { - // Protocol minor versions within the same major version should not go backwards. - return VersionCheckResult::Invalid; - } - - if next.0.minor > self.0.minor { - return VersionCheckResult::Valid { - is_major_version: false, - }; - } - - // Code belows covers equal minor versions - debug_assert_eq!(next.0.minor, self.0.minor); - - // Protocol patch versions should increase monotonically but can be skipped. - if next.0.patch <= self.0.patch { - return VersionCheckResult::Invalid; - } - - VersionCheckResult::Valid { - is_major_version: false, - } - } - - /// Checks if given protocol version is compatible with current one. - /// - /// Two protocol versions with different major version are considered to be incompatible. 
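// Editorial sketch (not part of the removed sources): the upgrade rules encoded by
// check_next_version, restated as assertions. It assumes the crate-root re-exports
// of ProtocolVersion and VersionCheckResult shown in lib.rs above.
use casper_types::{ProtocolVersion, VersionCheckResult};

fn main() {
    let current = ProtocolVersion::from_parts(1, 2, 3);

    // A major bump of exactly one is a valid major upgrade.
    assert_eq!(
        current.check_next_version(&ProtocolVersion::from_parts(2, 0, 0)),
        VersionCheckResult::Valid { is_major_version: true }
    );

    // Skipping a major version, or repeating the same version, is invalid.
    assert!(current
        .check_next_version(&ProtocolVersion::from_parts(3, 0, 0))
        .is_invalid());
    assert!(current.check_next_version(&current).is_invalid());

    // Compatibility only requires equal major versions.
    assert!(current.is_compatible_with(&ProtocolVersion::from_parts(1, 99, 99)));
}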
- pub fn is_compatible_with(&self, version: &ProtocolVersion) -> bool { - self.0.major == version.0.major - } -} - -impl ToBytes for ProtocolVersion { - fn to_bytes(&self) -> Result, Error> { - self.value().to_bytes() - } - - fn serialized_length(&self) -> usize { - self.value().serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend(self.0.major.to_le_bytes()); - writer.extend(self.0.minor.to_le_bytes()); - writer.extend(self.0.patch.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for ProtocolVersion { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (version, rem) = SemVer::from_bytes(bytes)?; - let protocol_version = ProtocolVersion::new(version); - Ok((protocol_version, rem)) - } -} - -impl FromStr for ProtocolVersion { - type Err = ParseSemVerError; - - fn from_str(s: &str) -> Result { - let version = SemVer::try_from(s)?; - Ok(ProtocolVersion::new(version)) - } -} - -impl Serialize for ProtocolVersion { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - let str = format!("{}.{}.{}", self.0.major, self.0.minor, self.0.patch); - String::serialize(&str, serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ProtocolVersion { - fn deserialize>(deserializer: D) -> Result { - let semver = if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - SemVer::try_from(value_as_string.as_str()).map_err(SerdeError::custom)? - } else { - SemVer::deserialize(deserializer)? - }; - Ok(ProtocolVersion(semver)) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ProtocolVersion { - fn schema_name() -> String { - String::from("ProtocolVersion") - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Casper Platform protocol version".to_string()); - schema_object.into() - } -} - -impl fmt::Display for ProtocolVersion { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::SemVer; - - #[test] - fn should_follow_version_with_optional_code() { - let value = VersionCheckResult::Valid { - is_major_version: false, - }; - assert!(!value.is_invalid()); - assert!(!value.is_major_version()); - } - - #[test] - fn should_follow_version_with_required_code() { - let value = VersionCheckResult::Valid { - is_major_version: true, - }; - assert!(!value.is_invalid()); - assert!(value.is_major_version()); - } - - #[test] - fn should_not_follow_version_with_invalid_code() { - let value = VersionCheckResult::Invalid; - assert!(value.is_invalid()); - assert!(!value.is_major_version()); - } - - #[test] - fn should_be_able_to_get_instance() { - let initial_value = SemVer::new(1, 0, 0); - let item = ProtocolVersion::new(initial_value); - assert_eq!(initial_value, item.value(), "should have equal value") - } - - #[test] - fn should_be_able_to_compare_two_instances() { - let lhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let rhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert_eq!(lhs, rhs, "should be equal"); - let rhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert_ne!(lhs, rhs, "should not be equal") - } - - #[test] - fn should_be_able_to_default() { - let defaulted = ProtocolVersion::default(); - let expected = ProtocolVersion::new(SemVer::new(0, 0, 0)); 
- assert_eq!(defaulted, expected, "should be equal") - } - - #[test] - fn should_be_able_to_compare_relative_value() { - let lhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - let rhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert!(lhs > rhs, "should be gt"); - let rhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!(lhs >= rhs, "should be gte"); - assert!(lhs <= rhs, "should be lte"); - let lhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert!(lhs < rhs, "should be lt"); - } - - #[test] - fn should_follow_major_version_upgrade() { - // If the upgrade protocol version is lower than or the same as EE's current in-use protocol - // version the upgrade is rejected and an error is returned; this includes the special case - // of a defaulted protocol version ( 0.0.0 ). - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!( - prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - } - - #[test] - fn should_reject_if_major_version_decreases() { - let prev = ProtocolVersion::new(SemVer::new(10, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(9, 0, 0)); - // Major version must not decrease ... - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_check_follows_minor_version_upgrade() { - // [major version] may remain the same in the case of a minor or patch version increase. - - // Minor version must not decrease within the same major version - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 2, 0)); - - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_not_care_if_minor_bump_resets_patch() { - let prev = ProtocolVersion::new(SemVer::new(1, 2, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 3, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - - let prev = ProtocolVersion::new(SemVer::new(1, 20, 42)); - let next = ProtocolVersion::new(SemVer::new(1, 30, 43)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_not_care_if_major_bump_resets_minor_or_patch() { - // A major version increase resets both the minor and patch versions to ( 0.0 ). - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 1, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - let next = ProtocolVersion::new(SemVer::new(2, 0, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - let next = ProtocolVersion::new(SemVer::new(2, 1, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - } - - #[test] - fn should_reject_patch_version_rollback() { - // Patch version must not decrease or remain the same within the same major and minor - // version pair, but may skip. 
- let prev = ProtocolVersion::new(SemVer::new(1, 0, 42)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 41)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - let next = ProtocolVersion::new(SemVer::new(1, 0, 13)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_accept_patch_version_update_with_optional_code() { - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 1)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - - let prev = ProtocolVersion::new(SemVer::new(1, 0, 8)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 42)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_accept_minor_version_update_with_optional_code() { - // installer is optional for minor bump - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 1, 0)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - - let prev = ProtocolVersion::new(SemVer::new(3, 98, 0)); - let next = ProtocolVersion::new(SemVer::new(3, 99, 0)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_allow_skip_minor_version_within_major_version() { - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - - let next = ProtocolVersion::new(SemVer::new(1, 3, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - - let next = ProtocolVersion::new(SemVer::new(1, 7, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_allow_skip_patch_version_within_minor_version() { - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - - let next = ProtocolVersion::new(SemVer::new(1, 1, 2)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_allow_skipped_minor_and_patch_on_major_bump() { - // skip minor - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 1, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - // skip patch - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - // skip many minors and patches - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 3, 10)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - } - - #[test] - fn should_allow_code_on_major_update() { - // major upgrade requires installer to be present - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!( - 
prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - - let prev = ProtocolVersion::new(SemVer::new(2, 99, 99)); - let next = ProtocolVersion::new(SemVer::new(3, 0, 0)); - assert!( - prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - } - - #[test] - fn should_not_skip_major_version() { - // can bump only by 1 - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(3, 0, 0)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_reject_major_version_rollback() { - // can bump forward - let prev = ProtocolVersion::new(SemVer::new(2, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(0, 0, 0)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_check_same_version_is_invalid() { - for ver in &[ - ProtocolVersion::from_parts(1, 0, 0), - ProtocolVersion::from_parts(1, 2, 0), - ProtocolVersion::from_parts(1, 2, 3), - ] { - assert_eq!(ver.check_next_version(ver), VersionCheckResult::Invalid); - } - } - - #[test] - fn should_not_be_compatible_with_different_major_version() { - let current = ProtocolVersion::from_parts(1, 2, 3); - let other = ProtocolVersion::from_parts(2, 5, 6); - assert!(!current.is_compatible_with(&other)); - - let current = ProtocolVersion::from_parts(1, 0, 0); - let other = ProtocolVersion::from_parts(2, 0, 0); - assert!(!current.is_compatible_with(&other)); - } - - #[test] - fn should_be_compatible_with_equal_major_version_backwards() { - let current = ProtocolVersion::from_parts(1, 99, 99); - let other = ProtocolVersion::from_parts(1, 0, 0); - assert!(current.is_compatible_with(&other)); - } - - #[test] - fn should_be_compatible_with_equal_major_version_forwards() { - let current = ProtocolVersion::from_parts(1, 0, 0); - let other = ProtocolVersion::from_parts(1, 99, 99); - assert!(current.is_compatible_with(&other)); - } - - #[test] - fn should_serialize_to_json_properly() { - let protocol_version = ProtocolVersion::from_parts(1, 1, 1); - let json = serde_json::to_string(&protocol_version).unwrap(); - let expected = "\"1.1.1\""; - assert_eq!(json, expected); - } - - #[test] - fn serialize_roundtrip() { - let protocol_version = ProtocolVersion::from_parts(1, 1, 1); - let serialized_json = serde_json::to_string(&protocol_version).unwrap(); - assert_eq!( - protocol_version, - serde_json::from_str(&serialized_json).unwrap() - ); - - let serialized_bincode = bincode::serialize(&protocol_version).unwrap(); - assert_eq!( - protocol_version, - bincode::deserialize(&serialized_bincode).unwrap() - ); - } -} diff --git a/casper_types/src/runtime_args.rs b/casper_types/src/runtime_args.rs deleted file mode 100644 index 271de625..00000000 --- a/casper_types/src/runtime_args.rs +++ /dev/null @@ -1,368 +0,0 @@ -//! Home of RuntimeArgs for calling contracts - -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{collections::BTreeMap, string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes}, - CLType, CLTyped, CLValue, CLValueError, U512, -}; -/// Named arguments to a contract. 
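// Editorial sketch (not part of the removed sources): constructing the RuntimeArgs
// collection deleted below without the `runtime_args!` macro, using `try_new` so that
// insertion errors propagate through `?`. The argument names are illustrative only;
// it assumes the crate-root re-exports of CLValue and RuntimeArgs shown in lib.rs above.
use casper_types::{CLValue, RuntimeArgs};

fn main() {
    let args = RuntimeArgs::try_new(|args| {
        args.insert("amount", 123u64)?;
        args.insert("target", "some-recipient")?;
        Ok(())
    })
    .expect("insertion should not fail for these value types");

    // Arguments are looked up by name and come back as CLValues.
    assert_eq!(args.get("amount"), Some(&CLValue::from_t(123u64).unwrap()));
    assert_eq!(args.len(), 2);
}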
-#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct NamedArg(String, CLValue); - -impl NamedArg { - /// Returns a new `NamedArg`. - pub fn new(name: String, value: CLValue) -> Self { - NamedArg(name, value) - } - - /// Returns the name of the named arg. - pub fn name(&self) -> &str { - &self.0 - } - - /// Returns the value of the named arg. - pub fn cl_value(&self) -> &CLValue { - &self.1 - } - - /// Returns a mutable reference to the value of the named arg. - pub fn cl_value_mut(&mut self) -> &mut CLValue { - &mut self.1 - } -} - -impl From<(String, CLValue)> for NamedArg { - fn from((name, value): (String, CLValue)) -> NamedArg { - NamedArg(name, value) - } -} - -impl ToBytes for NamedArg { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() - } -} - -impl FromBytes for NamedArg { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (name, remainder) = String::from_bytes(bytes)?; - let (cl_value, remainder) = CLValue::from_bytes(remainder)?; - Ok((NamedArg(name, cl_value), remainder)) - } -} - -/// Represents a collection of arguments passed to a smart contract. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct RuntimeArgs(Vec); - -impl RuntimeArgs { - /// Create an empty [`RuntimeArgs`] instance. - pub fn new() -> RuntimeArgs { - RuntimeArgs::default() - } - - /// A wrapper that lets you easily and safely create runtime arguments. - /// - /// This method is useful when you have to construct a [`RuntimeArgs`] with multiple entries, - /// but error handling at given call site would require to have a match statement for each - /// [`RuntimeArgs::insert`] call. With this method you can use ? operator inside the closure and - /// then handle single result. When `try_block` will be stabilized this method could be - /// deprecated in favor of using those blocks. - pub fn try_new(func: F) -> Result - where - F: FnOnce(&mut RuntimeArgs) -> Result<(), CLValueError>, - { - let mut runtime_args = RuntimeArgs::new(); - func(&mut runtime_args)?; - Ok(runtime_args) - } - - /// Gets an argument by its name. - pub fn get(&self, name: &str) -> Option<&CLValue> { - self.0.iter().find_map(|NamedArg(named_name, named_value)| { - if named_name == name { - Some(named_value) - } else { - None - } - }) - } - - /// Gets the length of the collection. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if the collection of arguments is empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Inserts a new named argument into the collection. - pub fn insert(&mut self, key: K, value: V) -> Result<(), CLValueError> - where - K: Into, - V: CLTyped + ToBytes, - { - let cl_value = CLValue::from_t(value)?; - self.0.push(NamedArg(key.into(), cl_value)); - Ok(()) - } - - /// Inserts a new named argument into the collection. - pub fn insert_cl_value(&mut self, key: K, cl_value: CLValue) - where - K: Into, - { - self.0.push(NamedArg(key.into(), cl_value)); - } - - /// Returns all the values of the named args. 
- pub fn to_values(&self) -> Vec<&CLValue> { - self.0.iter().map(|NamedArg(_name, value)| value).collect() - } - - /// Returns an iterator of references over all arguments in insertion order. - pub fn named_args(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns an iterator of mutable references over all arguments in insertion order. - pub fn named_args_mut(&mut self) -> impl Iterator { - self.0.iter_mut() - } - - /// Returns the numeric value of `name` arg from the runtime arguments or defaults to - /// 0 if that arg doesn't exist or is not an integer type. - /// - /// Supported [`CLType`]s for numeric conversions are U64, and U512. - /// - /// Returns an error if parsing the arg fails. - pub fn try_get_number(&self, name: &str) -> Result { - let amount_arg = match self.get(name) { - None => return Ok(U512::zero()), - Some(arg) => arg, - }; - match amount_arg.cl_type() { - CLType::U512 => amount_arg.clone().into_t::(), - CLType::U64 => amount_arg.clone().into_t::().map(U512::from), - _ => Ok(U512::zero()), - } - } -} - -impl From> for RuntimeArgs { - fn from(values: Vec) -> Self { - RuntimeArgs(values) - } -} - -impl From> for RuntimeArgs { - fn from(cl_values: BTreeMap) -> RuntimeArgs { - RuntimeArgs(cl_values.into_iter().map(NamedArg::from).collect()) - } -} - -impl From for BTreeMap { - fn from(args: RuntimeArgs) -> BTreeMap { - let mut map = BTreeMap::new(); - for named in args.0 { - map.insert(named.0, named.1); - } - map - } -} - -impl ToBytes for RuntimeArgs { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for RuntimeArgs { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (args, remainder) = Vec::::from_bytes(bytes)?; - Ok((RuntimeArgs(args), remainder)) - } -} - -/// Macro that makes it easier to construct named arguments. -/// -/// NOTE: This macro does not propagate possible errors that could occur while creating a -/// [`crate::CLValue`]. For such cases creating [`RuntimeArgs`] manually is recommended. -/// -/// # Example usage -/// ``` -/// use casper_types::{RuntimeArgs, runtime_args}; -/// let _named_args = runtime_args! { -/// "foo" => 42, -/// "bar" => "Hello, world!" -/// }; -/// ``` -#[macro_export] -macro_rules! runtime_args { - () => (RuntimeArgs::new()); - ( $($key:expr => $value:expr,)+ ) => (runtime_args!($($key => $value),+)); - ( $($key:expr => $value:expr),* ) => { - { - let mut named_args = RuntimeArgs::new(); - $( - named_args.insert($key, $value).unwrap(); - )* - named_args - } - }; -} - -#[cfg(test)] -mod tests { - use super::*; - - const ARG_AMOUNT: &str = "amount"; - - #[test] - fn test_runtime_args() { - let arg1 = CLValue::from_t(1).unwrap(); - let arg2 = CLValue::from_t("Foo").unwrap(); - let arg3 = CLValue::from_t(Some(1)).unwrap(); - let args = { - let mut map = BTreeMap::new(); - map.insert("bar".into(), arg2.clone()); - map.insert("foo".into(), arg1.clone()); - map.insert("qwer".into(), arg3.clone()); - map - }; - let runtime_args = RuntimeArgs::from(args); - assert_eq!(runtime_args.get("qwer"), Some(&arg3)); - assert_eq!(runtime_args.get("foo"), Some(&arg1)); - assert_eq!(runtime_args.get("bar"), Some(&arg2)); - assert_eq!(runtime_args.get("aaa"), None); - - // Ensure macro works - - let runtime_args_2 = runtime_args! { - "bar" => "Foo", - "foo" => 1i32, - "qwer" => Some(1i32), - }; - assert_eq!(runtime_args, runtime_args_2); - } - - #[test] - fn empty_macro() { - assert_eq!(runtime_args! 
{}, RuntimeArgs::new()); - } - - #[test] - fn btreemap_compat() { - // This test assumes same serialization format as BTreeMap - let runtime_args_1 = runtime_args! { - "bar" => "Foo", - "foo" => 1i32, - "qwer" => Some(1i32), - }; - let tagless = runtime_args_1.to_bytes().unwrap().to_vec(); - - let mut runtime_args_2 = BTreeMap::new(); - runtime_args_2.insert(String::from("bar"), CLValue::from_t("Foo").unwrap()); - runtime_args_2.insert(String::from("foo"), CLValue::from_t(1i32).unwrap()); - runtime_args_2.insert(String::from("qwer"), CLValue::from_t(Some(1i32)).unwrap()); - - assert_eq!(tagless, runtime_args_2.to_bytes().unwrap()); - } - - #[test] - fn named_serialization_roundtrip() { - let args = runtime_args! { - "foo" => 1i32, - }; - bytesrepr::test_serialization_roundtrip(&args); - } - - #[test] - fn should_create_args_with() { - let res = RuntimeArgs::try_new(|runtime_args| { - runtime_args.insert(String::from("foo"), 123)?; - runtime_args.insert(String::from("bar"), 456)?; - Ok(()) - }); - - let expected = runtime_args! { - "foo" => 123, - "bar" => 456, - }; - assert!(matches!(res, Ok(args) if expected == args)); - } - - #[test] - fn try_get_number_should_work() { - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, 0u64).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, U512::zero()).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let args = RuntimeArgs::new(); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let hundred = 100u64; - - let mut args = RuntimeArgs::new(); - let input = U512::from(hundred); - args.insert(ARG_AMOUNT, input).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), input); - - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, hundred).expect("is ok"); - assert_eq!( - args.try_get_number(ARG_AMOUNT).unwrap(), - U512::from(hundred) - ); - } - - #[test] - fn try_get_number_should_return_zero_for_non_numeric_type() { - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, "Non-numeric-string").unwrap(); - assert_eq!( - args.try_get_number(ARG_AMOUNT).expect("should get amount"), - U512::zero() - ); - } - - #[test] - fn try_get_number_should_return_zero_if_amount_is_missing() { - let args = RuntimeArgs::new(); - assert_eq!( - args.try_get_number(ARG_AMOUNT).expect("should get amount"), - U512::zero() - ); - } -} diff --git a/casper_types/src/semver.rs b/casper_types/src/semver.rs deleted file mode 100644 index 5feafe53..00000000 --- a/casper_types/src/semver.rs +++ /dev/null @@ -1,152 +0,0 @@ -use alloc::vec::Vec; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, - num::ParseIntError, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}; - -/// Length of SemVer when serialized -pub const SEM_VER_SERIALIZED_LENGTH: usize = 3 * U32_SERIALIZED_LENGTH; - -/// A struct for semantic versioning. -#[derive( - Copy, Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct SemVer { - /// Major version. - pub major: u32, - /// Minor version. - pub minor: u32, - /// Patch version. - pub patch: u32, -} - -impl SemVer { - /// Version 1.0.0. 
- pub const V1_0_0: SemVer = SemVer { - major: 1, - minor: 0, - patch: 0, - }; - - /// Constructs a new `SemVer` from the given semver parts. - pub const fn new(major: u32, minor: u32, patch: u32) -> SemVer { - SemVer { - major, - minor, - patch, - } - } -} - -impl ToBytes for SemVer { - fn to_bytes(&self) -> Result, Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.major.to_bytes()?); - ret.append(&mut self.minor.to_bytes()?); - ret.append(&mut self.patch.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - SEM_VER_SERIALIZED_LENGTH - } -} - -impl FromBytes for SemVer { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (major, rem): (u32, &[u8]) = FromBytes::from_bytes(bytes)?; - let (minor, rem): (u32, &[u8]) = FromBytes::from_bytes(rem)?; - let (patch, rem): (u32, &[u8]) = FromBytes::from_bytes(rem)?; - Ok((SemVer::new(major, minor, patch), rem)) - } -} - -impl Display for SemVer { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}.{}.{}", self.major, self.minor, self.patch) - } -} - -/// Parsing error when creating a SemVer. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ParseSemVerError { - /// Invalid version format. - InvalidVersionFormat, - /// Error parsing an integer. - ParseIntError(ParseIntError), -} - -impl Display for ParseSemVerError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - ParseSemVerError::InvalidVersionFormat => formatter.write_str("invalid version format"), - ParseSemVerError::ParseIntError(error) => error.fmt(formatter), - } - } -} - -impl From for ParseSemVerError { - fn from(error: ParseIntError) -> ParseSemVerError { - ParseSemVerError::ParseIntError(error) - } -} - -impl TryFrom<&str> for SemVer { - type Error = ParseSemVerError; - fn try_from(value: &str) -> Result { - let tokens: Vec<&str> = value.split('.').collect(); - if tokens.len() != 3 { - return Err(ParseSemVerError::InvalidVersionFormat); - } - - Ok(SemVer { - major: tokens[0].parse()?, - minor: tokens[1].parse()?, - patch: tokens[2].parse()?, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use core::convert::TryInto; - - #[test] - fn should_compare_semver_versions() { - assert!(SemVer::new(0, 0, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 1, 0) < SemVer::new(1, 2, 0)); - assert!(SemVer::new(1, 0, 0) < SemVer::new(1, 2, 0)); - assert!(SemVer::new(1, 0, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) == SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) >= SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) <= SemVer::new(1, 2, 3)); - assert!(SemVer::new(2, 0, 0) >= SemVer::new(1, 99, 99)); - assert!(SemVer::new(2, 0, 0) > SemVer::new(1, 99, 99)); - } - - #[test] - fn parse_from_string() { - let ver1: SemVer = "100.20.3".try_into().expect("should parse"); - assert_eq!(ver1, SemVer::new(100, 20, 3)); - let ver2: SemVer = "0.0.1".try_into().expect("should parse"); - assert_eq!(ver2, SemVer::new(0, 0, 1)); - - assert!(SemVer::try_from("1.a.2.3").is_err()); - assert!(SemVer::try_from("1. 
2.3").is_err()); - assert!(SemVer::try_from("12345124361461.0.1").is_err()); - assert!(SemVer::try_from("1.2.3.4").is_err()); - assert!(SemVer::try_from("1.2").is_err()); - assert!(SemVer::try_from("1").is_err()); - assert!(SemVer::try_from("0").is_err()); - } -} diff --git a/casper_types/src/stored_value.rs b/casper_types/src/stored_value.rs deleted file mode 100644 index d8190078..00000000 --- a/casper_types/src/stored_value.rs +++ /dev/null @@ -1,464 +0,0 @@ -mod type_mismatch; - -use alloc::{ - boxed::Box, - string::{String, ToString}, - vec::Vec, -}; -use core::{convert::TryFrom, fmt::Debug}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; -use serde_bytes::ByteBuf; - -use crate::{ - account::Account, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - contracts::ContractPackage, - system::auction::{Bid, EraInfo, UnbondingPurse, WithdrawPurse}, - CLValue, Contract, ContractWasm, DeployInfo, Transfer, -}; -pub use type_mismatch::TypeMismatch; - -#[allow(clippy::large_enum_variant)] -#[repr(u8)] -enum Tag { - CLValue = 0, - Account = 1, - ContractWasm = 2, - Contract = 3, - ContractPackage = 4, - Transfer = 5, - DeployInfo = 6, - EraInfo = 7, - Bid = 8, - Withdraw = 9, - Unbonding = 10, -} - -#[allow(clippy::large_enum_variant)] -#[derive(Eq, PartialEq, Clone, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -/// StoredValue represents all possible variants of values stored in Global State. -pub enum StoredValue { - /// Variant that stores [`CLValue`]. - CLValue(CLValue), - /// Variant that stores [`Account`]. - Account(Account), - /// Variant that stores [`ContractWasm`]. - ContractWasm(ContractWasm), - /// Variant that stores [`Contract`]. - Contract(Contract), - /// Variant that stores [`ContractPackage`]. - ContractPackage(ContractPackage), - /// Variant that stores [`Transfer`]. - Transfer(Transfer), - /// Variant that stores [`DeployInfo`]. - DeployInfo(DeployInfo), - /// Variant that stores [`EraInfo`]. - EraInfo(EraInfo), - /// Variant that stores [`Bid`]. - Bid(Box), - /// Variant that stores withdraw information. - Withdraw(Vec), - /// Variant that stores unbonding information. - Unbonding(Vec), -} - -impl StoredValue { - /// Returns a wrapped [`CLValue`] if this is a `CLValue` variant. - pub fn as_cl_value(&self) -> Option<&CLValue> { - match self { - StoredValue::CLValue(cl_value) => Some(cl_value), - _ => None, - } - } - - /// Returns a wrapped [`Account`] if this is an `Account` variant. - pub fn as_account(&self) -> Option<&Account> { - match self { - StoredValue::Account(account) => Some(account), - _ => None, - } - } - - /// Returns a wrapped [`Contract`] if this is a `Contract` variant. - pub fn as_contract(&self) -> Option<&Contract> { - match self { - StoredValue::Contract(contract) => Some(contract), - _ => None, - } - } - - /// Returns a wrapped [`ContractWasm`] if this is a `ContractWasm` variant. - pub fn as_contract_wasm(&self) -> Option<&ContractWasm> { - match self { - StoredValue::ContractWasm(contract_wasm) => Some(contract_wasm), - _ => None, - } - } - - /// Returns a wrapped [`ContractPackage`] if this is a `ContractPackage` variant. - pub fn as_contract_package(&self) -> Option<&ContractPackage> { - match self { - StoredValue::ContractPackage(contract_package) => Some(contract_package), - _ => None, - } - } - - /// Returns a wrapped [`DeployInfo`] if this is a `DeployInfo` variant. 
- pub fn as_deploy_info(&self) -> Option<&DeployInfo> { - match self { - StoredValue::DeployInfo(deploy_info) => Some(deploy_info), - _ => None, - } - } - - /// Returns a wrapped [`EraInfo`] if this is a `EraInfo` variant. - pub fn as_era_info(&self) -> Option<&EraInfo> { - match self { - StoredValue::EraInfo(era_info) => Some(era_info), - _ => None, - } - } - - /// Returns a wrapped [`Bid`] if this is a `Bid` variant. - pub fn as_bid(&self) -> Option<&Bid> { - match self { - StoredValue::Bid(bid) => Some(bid), - _ => None, - } - } - - /// Returns a wrapped list of [`WithdrawPurse`]s if this is a `Withdraw` variant. - pub fn as_withdraw(&self) -> Option<&Vec> { - match self { - StoredValue::Withdraw(withdraw_purses) => Some(withdraw_purses), - _ => None, - } - } - - /// Returns a wrapped list of [`UnbondingPurse`]s if this is a `Unbonding` variant. - pub fn as_unbonding(&self) -> Option<&Vec> { - match self { - StoredValue::Unbonding(unbonding_purses) => Some(unbonding_purses), - _ => None, - } - } - - /// Returns the type name of the [`StoredValue`] enum variant. - /// - /// For [`CLValue`] variants it will return the name of the [`CLType`](crate::cl_type::CLType) - pub fn type_name(&self) -> String { - match self { - StoredValue::CLValue(cl_value) => format!("{:?}", cl_value.cl_type()), - StoredValue::Account(_) => "Account".to_string(), - StoredValue::ContractWasm(_) => "ContractWasm".to_string(), - StoredValue::Contract(_) => "Contract".to_string(), - StoredValue::ContractPackage(_) => "ContractPackage".to_string(), - StoredValue::Transfer(_) => "Transfer".to_string(), - StoredValue::DeployInfo(_) => "DeployInfo".to_string(), - StoredValue::EraInfo(_) => "EraInfo".to_string(), - StoredValue::Bid(_) => "Bid".to_string(), - StoredValue::Withdraw(_) => "Withdraw".to_string(), - StoredValue::Unbonding(_) => "Unbonding".to_string(), - } - } - - fn tag(&self) -> Tag { - match self { - StoredValue::CLValue(_) => Tag::CLValue, - StoredValue::Account(_) => Tag::Account, - StoredValue::ContractWasm(_) => Tag::ContractWasm, - StoredValue::Contract(_) => Tag::Contract, - StoredValue::ContractPackage(_) => Tag::ContractPackage, - StoredValue::Transfer(_) => Tag::Transfer, - StoredValue::DeployInfo(_) => Tag::DeployInfo, - StoredValue::EraInfo(_) => Tag::EraInfo, - StoredValue::Bid(_) => Tag::Bid, - StoredValue::Withdraw(_) => Tag::Withdraw, - StoredValue::Unbonding(_) => Tag::Unbonding, - } - } -} - -impl From for StoredValue { - fn from(value: CLValue) -> StoredValue { - StoredValue::CLValue(value) - } -} -impl From for StoredValue { - fn from(value: Account) -> StoredValue { - StoredValue::Account(value) - } -} -impl From for StoredValue { - fn from(value: ContractWasm) -> StoredValue { - StoredValue::ContractWasm(value) - } -} -impl From for StoredValue { - fn from(value: Contract) -> StoredValue { - StoredValue::Contract(value) - } -} -impl From for StoredValue { - fn from(value: ContractPackage) -> StoredValue { - StoredValue::ContractPackage(value) - } -} -impl From for StoredValue { - fn from(bid: Bid) -> StoredValue { - StoredValue::Bid(Box::new(bid)) - } -} - -impl TryFrom for CLValue { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - let type_name = stored_value.type_name(); - match stored_value { - StoredValue::CLValue(cl_value) => Ok(cl_value), - StoredValue::ContractPackage(contract_package) => Ok(CLValue::from_t(contract_package) - .map_err(|_error| TypeMismatch::new("ContractPackage".to_string(), type_name))?), - _ => 
Err(TypeMismatch::new("CLValue".to_string(), type_name)), - } - } -} - -impl TryFrom for Account { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::Account(account) => Ok(account), - _ => Err(TypeMismatch::new( - "Account".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for ContractWasm { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::ContractWasm(contract_wasm) => Ok(contract_wasm), - _ => Err(TypeMismatch::new( - "ContractWasm".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for ContractPackage { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::ContractPackage(contract_package) => Ok(contract_package), - _ => Err(TypeMismatch::new( - "ContractPackage".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for Contract { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::Contract(contract) => Ok(contract), - _ => Err(TypeMismatch::new( - "Contract".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for Transfer { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::Transfer(transfer) => Ok(transfer), - _ => Err(TypeMismatch::new("Transfer".to_string(), value.type_name())), - } - } -} - -impl TryFrom for DeployInfo { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::DeployInfo(deploy_info) => Ok(deploy_info), - _ => Err(TypeMismatch::new( - "DeployInfo".to_string(), - value.type_name(), - )), - } - } -} - -impl TryFrom for EraInfo { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::EraInfo(era_info) => Ok(era_info), - _ => Err(TypeMismatch::new("EraInfo".to_string(), value.type_name())), - } - } -} - -impl ToBytes for StoredValue { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - let (tag, mut serialized_data) = match self { - StoredValue::CLValue(cl_value) => (Tag::CLValue, cl_value.to_bytes()?), - StoredValue::Account(account) => (Tag::Account, account.to_bytes()?), - StoredValue::ContractWasm(contract_wasm) => { - (Tag::ContractWasm, contract_wasm.to_bytes()?) - } - StoredValue::Contract(contract_header) => (Tag::Contract, contract_header.to_bytes()?), - StoredValue::ContractPackage(contract_package) => { - (Tag::ContractPackage, contract_package.to_bytes()?) - } - StoredValue::Transfer(transfer) => (Tag::Transfer, transfer.to_bytes()?), - StoredValue::DeployInfo(deploy_info) => (Tag::DeployInfo, deploy_info.to_bytes()?), - StoredValue::EraInfo(era_info) => (Tag::EraInfo, era_info.to_bytes()?), - StoredValue::Bid(bid) => (Tag::Bid, bid.to_bytes()?), - StoredValue::Withdraw(withdraw_purses) => (Tag::Withdraw, withdraw_purses.to_bytes()?), - StoredValue::Unbonding(unbonding_purses) => { - (Tag::Unbonding, unbonding_purses.to_bytes()?) 
- } - }; - result.push(tag as u8); - result.append(&mut serialized_data); - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - StoredValue::CLValue(cl_value) => cl_value.serialized_length(), - StoredValue::Account(account) => account.serialized_length(), - StoredValue::ContractWasm(contract_wasm) => contract_wasm.serialized_length(), - StoredValue::Contract(contract_header) => contract_header.serialized_length(), - StoredValue::ContractPackage(contract_package) => { - contract_package.serialized_length() - } - StoredValue::Transfer(transfer) => transfer.serialized_length(), - StoredValue::DeployInfo(deploy_info) => deploy_info.serialized_length(), - StoredValue::EraInfo(era_info) => era_info.serialized_length(), - StoredValue::Bid(bid) => bid.serialized_length(), - StoredValue::Withdraw(withdraw_purses) => withdraw_purses.serialized_length(), - StoredValue::Unbonding(unbonding_purses) => unbonding_purses.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.tag() as u8); - match self { - StoredValue::CLValue(cl_value) => cl_value.write_bytes(writer)?, - StoredValue::Account(account) => account.write_bytes(writer)?, - StoredValue::ContractWasm(contract_wasm) => contract_wasm.write_bytes(writer)?, - StoredValue::Contract(contract_header) => contract_header.write_bytes(writer)?, - StoredValue::ContractPackage(contract_package) => { - contract_package.write_bytes(writer)? - } - StoredValue::Transfer(transfer) => transfer.write_bytes(writer)?, - StoredValue::DeployInfo(deploy_info) => deploy_info.write_bytes(writer)?, - StoredValue::EraInfo(era_info) => era_info.write_bytes(writer)?, - StoredValue::Bid(bid) => bid.write_bytes(writer)?, - StoredValue::Withdraw(unbonding_purses) => unbonding_purses.write_bytes(writer)?, - StoredValue::Unbonding(unbonding_purses) => unbonding_purses.write_bytes(writer)?, - }; - Ok(()) - } -} - -impl FromBytes for StoredValue { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - match tag { - tag if tag == Tag::CLValue as u8 => CLValue::from_bytes(remainder) - .map(|(cl_value, remainder)| (StoredValue::CLValue(cl_value), remainder)), - tag if tag == Tag::Account as u8 => Account::from_bytes(remainder) - .map(|(account, remainder)| (StoredValue::Account(account), remainder)), - tag if tag == Tag::ContractWasm as u8 => { - ContractWasm::from_bytes(remainder).map(|(contract_wasm, remainder)| { - (StoredValue::ContractWasm(contract_wasm), remainder) - }) - } - tag if tag == Tag::ContractPackage as u8 => { - ContractPackage::from_bytes(remainder).map(|(contract_package, remainder)| { - (StoredValue::ContractPackage(contract_package), remainder) - }) - } - tag if tag == Tag::Contract as u8 => Contract::from_bytes(remainder) - .map(|(contract, remainder)| (StoredValue::Contract(contract), remainder)), - tag if tag == Tag::Transfer as u8 => Transfer::from_bytes(remainder) - .map(|(transfer, remainder)| (StoredValue::Transfer(transfer), remainder)), - tag if tag == Tag::DeployInfo as u8 => DeployInfo::from_bytes(remainder) - .map(|(deploy_info, remainder)| (StoredValue::DeployInfo(deploy_info), remainder)), - tag if tag == Tag::EraInfo as u8 => EraInfo::from_bytes(remainder) - .map(|(deploy_info, remainder)| (StoredValue::EraInfo(deploy_info), remainder)), - tag if tag == Tag::Bid as u8 => Bid::from_bytes(remainder) - .map(|(bid, remainder)| 
(StoredValue::Bid(Box::new(bid)), remainder)), - tag if tag == Tag::Withdraw as u8 => { - Vec::::from_bytes(remainder).map(|(withdraw_purses, remainder)| { - (StoredValue::Withdraw(withdraw_purses), remainder) - }) - } - tag if tag == Tag::Unbonding as u8 => { - Vec::::from_bytes(remainder).map(|(unbonding_purses, remainder)| { - (StoredValue::Unbonding(unbonding_purses), remainder) - }) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for StoredValue { - fn serialize(&self, serializer: S) -> Result { - // The JSON representation of a StoredValue is just its bytesrepr - // While this makes it harder to inspect, it makes deterministic representation simple. - let bytes = self - .to_bytes() - .map_err(|error| ser::Error::custom(format!("{:?}", error)))?; - ByteBuf::from(bytes).serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for StoredValue { - fn deserialize>(deserializer: D) -> Result { - let bytes = ByteBuf::deserialize(deserializer)?.into_vec(); - bytesrepr::deserialize::(bytes) - .map_err(|error| de::Error::custom(format!("{:?}", error))) - } -} - -#[cfg(test)] -mod tests { - use proptest::proptest; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn serialization_roundtrip(v in gens::stored_value_arb()) { - bytesrepr::test_serialization_roundtrip(&v); - } - } -} diff --git a/casper_types/src/stored_value/type_mismatch.rs b/casper_types/src/stored_value/type_mismatch.rs deleted file mode 100644 index cd59b766..00000000 --- a/casper_types/src/stored_value/type_mismatch.rs +++ /dev/null @@ -1,30 +0,0 @@ -use alloc::string::String; -use core::fmt::{self, Display, Formatter}; - -use serde::{Deserialize, Serialize}; - -#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -/// An error struct representing a type mismatch in [`StoredValue`](crate::StoredValue) operations. -pub struct TypeMismatch { - /// The name of the expected type. - expected: String, - /// The actual type found. - found: String, -} - -impl TypeMismatch { - /// Creates a new `TypeMismatch`. - pub fn new(expected: String, found: String) -> TypeMismatch { - TypeMismatch { expected, found } - } -} - -impl Display for TypeMismatch { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "Type mismatch. Expected {} but found {}.", - self.expected, self.found - ) - } -} diff --git a/casper_types/src/system.rs b/casper_types/src/system.rs deleted file mode 100644 index cdae3f6f..00000000 --- a/casper_types/src/system.rs +++ /dev/null @@ -1,14 +0,0 @@ -//! System modules, formerly known as "system contracts" -pub mod auction; -mod call_stack_element; -mod error; -pub mod handle_payment; -pub mod mint; -pub mod standard_payment; -mod system_contract_type; - -pub use call_stack_element::{CallStackElement, CallStackElementTag}; -pub use error::Error; -pub use system_contract_type::{ - SystemContractType, AUCTION, HANDLE_PAYMENT, MINT, STANDARD_PAYMENT, -}; diff --git a/casper_types/src/system/auction.rs b/casper_types/src/system/auction.rs deleted file mode 100644 index 5831ab24..00000000 --- a/casper_types/src/system/auction.rs +++ /dev/null @@ -1,53 +0,0 @@ -//! Contains implementation of a Auction contract functionality. 
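The `StoredValue` `ToBytes`/`FromBytes` implementations removed above all follow the same bytesrepr layout: one tag byte identifying the variant, then that variant's own serialization, with unknown tags rejected as a formatting error. A minimal, self-contained sketch of that pattern follows; `MiniStoredValue`, its two variants, and the `u64` payloads are invented for illustration only, whereas the real type dispatches to each variant's own `ToBytes`/`FromBytes`.

// Illustrative stand-in for the tag-prefixed encoding used by the deleted StoredValue impls.
#[derive(Debug, PartialEq)]
enum MiniStoredValue {
    CLValue(u64), // tag 0 in this sketch
    Account(u64), // tag 1 in this sketch
}

fn to_bytes(v: &MiniStoredValue) -> Vec<u8> {
    let (tag, payload) = match v {
        MiniStoredValue::CLValue(x) => (0u8, x.to_le_bytes()),
        MiniStoredValue::Account(x) => (1u8, x.to_le_bytes()),
    };
    let mut out = Vec::with_capacity(1 + payload.len());
    out.push(tag); // tag byte first, as in `result.push(tag as u8)` above
    out.extend_from_slice(&payload);
    out
}

fn from_bytes(bytes: &[u8]) -> Option<(MiniStoredValue, &[u8])> {
    let (&tag, rest) = bytes.split_first()?;
    if rest.len() < 8 {
        return None;
    }
    let (payload, rest) = rest.split_at(8);
    let x = u64::from_le_bytes(payload.try_into().ok()?);
    let value = match tag {
        0 => MiniStoredValue::CLValue(x),
        1 => MiniStoredValue::Account(x),
        _ => return None, // unknown tag is treated as a formatting error
    };
    Some((value, rest))
}

fn main() {
    let v = MiniStoredValue::Account(42);
    let bytes = to_bytes(&v);
    let (decoded, rest) = from_bytes(&bytes).unwrap();
    assert_eq!(decoded, v);
    assert!(rest.is_empty());
}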
-mod bid; -mod constants; -mod delegator; -mod entry_points; -mod era_info; -mod error; -mod seigniorage_recipient; -mod unbonding_purse; -mod withdraw_purse; - -use alloc::{collections::BTreeMap, vec::Vec}; - -pub use bid::{Bid, VESTING_SCHEDULE_LENGTH_MILLIS}; -pub use constants::*; -pub use delegator::Delegator; -pub use entry_points::auction_entry_points; -pub use era_info::{EraInfo, SeigniorageAllocation}; -pub use error::Error; -pub use seigniorage_recipient::SeigniorageRecipient; -pub use unbonding_purse::UnbondingPurse; -pub use withdraw_purse::WithdrawPurse; - -#[cfg(any(feature = "testing", test))] -pub(crate) mod gens { - pub use super::era_info::gens::*; -} - -use crate::{account::AccountHash, EraId, PublicKey, U512}; - -/// Representation of delegation rate of tokens. Range from 0..=100. -pub type DelegationRate = u8; - -/// Validators mapped to their bids. -pub type Bids = BTreeMap; - -/// Weights of validators. "Weight" in this context means a sum of their stakes. -pub type ValidatorWeights = BTreeMap; - -/// List of era validators -pub type EraValidators = BTreeMap; - -/// Collection of seigniorage recipients. -pub type SeigniorageRecipients = BTreeMap; - -/// Snapshot of `SeigniorageRecipients` for a given era. -pub type SeigniorageRecipientsSnapshot = BTreeMap; - -/// Validators and delegators mapped to their unbonding purses. -pub type UnbondingPurses = BTreeMap>; - -/// Validators and delegators mapped to their withdraw purses. -pub type WithdrawPurses = BTreeMap>; diff --git a/casper_types/src/system/auction/bid.rs b/casper_types/src/system/auction/bid.rs deleted file mode 100644 index ca5f7625..00000000 --- a/casper_types/src/system/auction/bid.rs +++ /dev/null @@ -1,554 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -mod vesting; - -use alloc::{collections::BTreeMap, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{DelegationRate, Delegator, Error}, - CLType, CLTyped, PublicKey, URef, U512, -}; - -pub use vesting::{VestingSchedule, VESTING_SCHEDULE_LENGTH_MILLIS}; - -/// An entry in the validator map. -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Bid { - /// Validator public key - validator_public_key: PublicKey, - /// The purse that was used for bonding. - bonding_purse: URef, - /// The amount of tokens staked by a validator (not including delegators). - staked_amount: U512, - /// Delegation rate - delegation_rate: DelegationRate, - /// Vesting schedule for a genesis validator. `None` if non-genesis validator. - vesting_schedule: Option, - /// This validator's delegators, indexed by their public keys - delegators: BTreeMap, - /// `true` if validator has been "evicted" - inactive: bool, -} - -impl Bid { - /// Creates new instance of a bid with locked funds. 
- pub fn locked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - release_timestamp_millis: u64, - ) -> Self { - let vesting_schedule = Some(VestingSchedule::new(release_timestamp_millis)); - let delegators = BTreeMap::new(); - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Creates new instance of a bid with unlocked funds. - pub fn unlocked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - ) -> Self { - let vesting_schedule = None; - let delegators = BTreeMap::new(); - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Creates a new inactive instance of a bid with 0 staked amount. - pub fn empty(validator_public_key: PublicKey, bonding_purse: URef) -> Self { - let vesting_schedule = None; - let delegators = BTreeMap::new(); - let inactive = true; - let staked_amount = 0.into(); - let delegation_rate = Default::default(); - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Gets the validator public key of the provided bid - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Gets the bonding purse of the provided bid - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked(&self, timestamp_millis: u64) -> bool { - self.is_locked_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked_with_vesting_schedule( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - match &self.vesting_schedule { - Some(vesting_schedule) => { - vesting_schedule.is_vesting(timestamp_millis, vesting_schedule_period_millis) - } - None => false, - } - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount(&self) -> &U512 { - &self.staked_amount - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount_mut(&mut self) -> &mut U512 { - &mut self.staked_amount - } - - /// Gets the delegation rate of the provided bid - pub fn delegation_rate(&self) -> &DelegationRate { - &self.delegation_rate - } - - /// Returns a reference to the vesting schedule of the provided bid. `None` if a non-genesis - /// validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - self.vesting_schedule.as_ref() - } - - /// Returns a mutable reference to the vesting schedule of the provided bid. `None` if a - /// non-genesis validator. 
- pub fn vesting_schedule_mut(&mut self) -> Option<&mut VestingSchedule> { - self.vesting_schedule.as_mut() - } - - /// Returns a reference to the delegators of the provided bid - pub fn delegators(&self) -> &BTreeMap { - &self.delegators - } - - /// Returns a mutable reference to the delegators of the provided bid - pub fn delegators_mut(&mut self) -> &mut BTreeMap { - &mut self.delegators - } - - /// Returns `true` if validator is inactive - pub fn inactive(&self) -> bool { - self.inactive - } - - /// Decreases the stake of the provided bid - pub fn decrease_stake( - &mut self, - amount: U512, - era_end_timestamp_millis: u64, - ) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_sub(amount) - .ok_or(Error::UnbondTooLarge)?; - - let vesting_schedule = match self.vesting_schedule.as_ref() { - Some(vesting_schedule) => vesting_schedule, - None => { - self.staked_amount = updated_staked_amount; - return Ok(updated_staked_amount); - } - }; - - match vesting_schedule.locked_amount(era_end_timestamp_millis) { - Some(locked_amount) if updated_staked_amount < locked_amount => { - Err(Error::ValidatorFundsLocked) - } - None => { - // If `None`, then the locked amounts table has yet to be initialized (likely - // pre-90 day mark) - Err(Error::ValidatorFundsLocked) - } - Some(_) => { - self.staked_amount = updated_staked_amount; - Ok(updated_staked_amount) - } - } - } - - /// Increases the stake of the provided bid - pub fn increase_stake(&mut self, amount: U512) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_add(amount) - .ok_or(Error::InvalidAmount)?; - - self.staked_amount = updated_staked_amount; - - Ok(updated_staked_amount) - } - - /// Updates the delegation rate of the provided bid - pub fn with_delegation_rate(&mut self, delegation_rate: DelegationRate) -> &mut Self { - self.delegation_rate = delegation_rate; - self - } - - /// Initializes the vesting schedule of provided bid if the provided timestamp is greater than - /// or equal to the bid's initial release timestamp and the bid is owned by a genesis - /// validator. This method initializes with default 14 week vesting schedule. - /// - /// Returns `true` if the provided bid's vesting schedule was initialized. - pub fn process(&mut self, timestamp_millis: u64) -> bool { - self.process_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Initializes the vesting schedule of provided bid if the provided timestamp is greater than - /// or equal to the bid's initial release timestamp and the bid is owned by a genesis - /// validator. - /// - /// Returns `true` if the provided bid's vesting schedule was initialized. 
- pub fn process_with_vesting_schedule( - &mut self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - // Put timestamp-sensitive processing logic in here - let staked_amount = self.staked_amount; - let vesting_schedule = match self.vesting_schedule_mut() { - Some(vesting_schedule) => vesting_schedule, - None => return false, - }; - if timestamp_millis < vesting_schedule.initial_release_timestamp_millis() { - return false; - } - - let mut initialized = false; - - if vesting_schedule.initialize_with_schedule(staked_amount, vesting_schedule_period_millis) - { - initialized = true; - } - - for delegator in self.delegators_mut().values_mut() { - let staked_amount = *delegator.staked_amount(); - if let Some(vesting_schedule) = delegator.vesting_schedule_mut() { - if timestamp_millis >= vesting_schedule.initial_release_timestamp_millis() - && vesting_schedule - .initialize_with_schedule(staked_amount, vesting_schedule_period_millis) - { - initialized = true; - } - } - } - - initialized - } - - /// Sets given bid's `inactive` field to `false` - pub fn activate(&mut self) -> bool { - self.inactive = false; - false - } - - /// Sets given bid's `inactive` field to `true` - pub fn deactivate(&mut self) -> bool { - self.inactive = true; - true - } - - /// Returns the total staked amount of validator + all delegators - pub fn total_staked_amount(&self) -> Result { - self.delegators - .iter() - .try_fold(U512::zero(), |a, (_, b)| a.checked_add(*b.staked_amount())) - .and_then(|delegators_sum| delegators_sum.checked_add(*self.staked_amount())) - .ok_or(Error::InvalidAmount) - } -} - -impl CLTyped for Bid { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for Bid { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.validator_public_key.write_bytes(&mut result)?; - self.bonding_purse.write_bytes(&mut result)?; - self.staked_amount.write_bytes(&mut result)?; - self.delegation_rate.write_bytes(&mut result)?; - self.vesting_schedule.write_bytes(&mut result)?; - self.delegators().write_bytes(&mut result)?; - self.inactive.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.validator_public_key.serialized_length() - + self.bonding_purse.serialized_length() - + self.staked_amount.serialized_length() - + self.delegation_rate.serialized_length() - + self.vesting_schedule.serialized_length() - + self.delegators.serialized_length() - + self.inactive.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.validator_public_key.write_bytes(writer)?; - self.bonding_purse.write_bytes(writer)?; - self.staked_amount.write_bytes(writer)?; - self.delegation_rate.write_bytes(writer)?; - self.vesting_schedule.write_bytes(writer)?; - self.delegators().write_bytes(writer)?; - self.inactive.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Bid { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validator_public_key, bytes) = FromBytes::from_bytes(bytes)?; - let (bonding_purse, bytes) = FromBytes::from_bytes(bytes)?; - let (staked_amount, bytes) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, bytes) = FromBytes::from_bytes(bytes)?; - let (vesting_schedule, bytes) = FromBytes::from_bytes(bytes)?; - let (delegators, bytes) = FromBytes::from_bytes(bytes)?; - let (inactive, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - Bid { - validator_public_key, - bonding_purse, - staked_amount, - 
delegation_rate, - vesting_schedule, - delegators, - inactive, - }, - bytes, - )) - } -} - -#[cfg(test)] -mod tests { - use alloc::collections::BTreeMap; - - use crate::{ - bytesrepr, - system::auction::{bid::VestingSchedule, Bid, DelegationRate, Delegator}, - AccessRights, PublicKey, SecretKey, URef, U512, - }; - - const WEEK_MILLIS: u64 = 7 * 24 * 60 * 60 * 1000; - const TEST_VESTING_SCHEDULE_LENGTH_MILLIS: u64 = 7 * WEEK_MILLIS; - - #[test] - fn serialization_roundtrip() { - let founding_validator = Bid { - validator_public_key: PublicKey::from( - &SecretKey::ed25519_from_bytes([0u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - bonding_purse: URef::new([42; 32], AccessRights::READ_ADD_WRITE), - staked_amount: U512::one(), - delegation_rate: DelegationRate::max_value(), - vesting_schedule: Some(VestingSchedule::default()), - delegators: BTreeMap::default(), - inactive: true, - }; - bytesrepr::test_serialization_roundtrip(&founding_validator); - } - - #[test] - fn should_immediately_initialize_unlock_amounts() { - const TIMESTAMP_MILLIS: u64 = 0; - - let validator_pk: PublicKey = (&SecretKey::ed25519_from_bytes([42; 32]).unwrap()).into(); - - let validator_release_timestamp = TIMESTAMP_MILLIS; - let vesting_schedule_period_millis = TIMESTAMP_MILLIS; - let validator_bonding_purse = URef::new([42; 32], AccessRights::ADD); - let validator_staked_amount = U512::from(1000); - let validator_delegation_rate = 0; - - let mut bid = Bid::locked( - validator_pk, - validator_bonding_purse, - validator_staked_amount, - validator_delegation_rate, - validator_release_timestamp, - ); - - assert!(bid.process_with_vesting_schedule( - validator_release_timestamp, - vesting_schedule_period_millis, - )); - assert!(!bid.is_locked_with_vesting_schedule( - validator_release_timestamp, - vesting_schedule_period_millis - )); - } - - #[test] - fn should_initialize_delegators_different_timestamps() { - const TIMESTAMP_MILLIS: u64 = WEEK_MILLIS; - - let validator_pk: PublicKey = (&SecretKey::ed25519_from_bytes([42; 32]).unwrap()).into(); - - let delegator_1_pk: PublicKey = (&SecretKey::ed25519_from_bytes([43; 32]).unwrap()).into(); - let delegator_2_pk: PublicKey = (&SecretKey::ed25519_from_bytes([44; 32]).unwrap()).into(); - - let validator_release_timestamp = TIMESTAMP_MILLIS; - let validator_bonding_purse = URef::new([42; 32], AccessRights::ADD); - let validator_staked_amount = U512::from(1000); - let validator_delegation_rate = 0; - - let delegator_1_release_timestamp = TIMESTAMP_MILLIS + 1; - let delegator_1_bonding_purse = URef::new([52; 32], AccessRights::ADD); - let delegator_1_staked_amount = U512::from(2000); - - let delegator_2_release_timestamp = TIMESTAMP_MILLIS + 2; - let delegator_2_bonding_purse = URef::new([62; 32], AccessRights::ADD); - let delegator_2_staked_amount = U512::from(3000); - - let delegator_1 = Delegator::locked( - delegator_1_pk.clone(), - delegator_1_staked_amount, - delegator_1_bonding_purse, - validator_pk.clone(), - delegator_1_release_timestamp, - ); - - let delegator_2 = Delegator::locked( - delegator_2_pk.clone(), - delegator_2_staked_amount, - delegator_2_bonding_purse, - validator_pk.clone(), - delegator_2_release_timestamp, - ); - - let mut bid = Bid::locked( - validator_pk, - validator_bonding_purse, - validator_staked_amount, - validator_delegation_rate, - validator_release_timestamp, - ); - - assert!(!bid.process_with_vesting_schedule( - validator_release_timestamp - 1, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - { - let delegators = bid.delegators_mut(); - - 
delegators.insert(delegator_1_pk.clone(), delegator_1); - delegators.insert(delegator_2_pk.clone(), delegator_2); - } - - assert!(bid.process_with_vesting_schedule( - delegator_1_release_timestamp, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - let delegator_1_updated_1 = bid.delegators().get(&delegator_1_pk).cloned().unwrap(); - assert!(delegator_1_updated_1 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - - let delegator_2_updated_1 = bid.delegators().get(&delegator_2_pk).cloned().unwrap(); - assert!(delegator_2_updated_1 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_none()); - - assert!(bid.process_with_vesting_schedule( - delegator_2_release_timestamp, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - let delegator_1_updated_2 = bid.delegators().get(&delegator_1_pk).cloned().unwrap(); - assert!(delegator_1_updated_2 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - // Delegator 1 is already initialized and did not change after 2nd Bid::process - assert_eq!(delegator_1_updated_1, delegator_1_updated_2); - - let delegator_2_updated_2 = bid.delegators().get(&delegator_2_pk).cloned().unwrap(); - assert!(delegator_2_updated_2 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - - // Delegator 2 is different compared to first Bid::process - assert_ne!(delegator_2_updated_1, delegator_2_updated_2); - - // Validator initialized, and all delegators initialized - assert!(!bid.process_with_vesting_schedule( - delegator_2_release_timestamp + 1, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid(bid in gens::bid_arb(1..100)) { - bytesrepr::test_serialization_roundtrip(&bid); - } - } -} diff --git a/casper_types/src/system/auction/bid/vesting.rs b/casper_types/src/system/auction/bid/vesting.rs deleted file mode 100644 index 6d59f27c..00000000 --- a/casper_types/src/system/auction/bid/vesting.rs +++ /dev/null @@ -1,523 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes}, - U512, -}; - -const DAY_MILLIS: usize = 24 * 60 * 60 * 1000; -const DAYS_IN_WEEK: usize = 7; -const WEEK_MILLIS: usize = DAYS_IN_WEEK * DAY_MILLIS; - -/// Length of total vesting schedule in days. -const VESTING_SCHEDULE_LENGTH_DAYS: usize = 91; -/// Length of total vesting schedule expressed in days. 
-pub const VESTING_SCHEDULE_LENGTH_MILLIS: u64 = - VESTING_SCHEDULE_LENGTH_DAYS as u64 * DAY_MILLIS as u64; -/// 91 days / 7 days in a week = 13 weeks -const LOCKED_AMOUNTS_MAX_LENGTH: usize = (VESTING_SCHEDULE_LENGTH_DAYS / DAYS_IN_WEEK) + 1; - -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct VestingSchedule { - initial_release_timestamp_millis: u64, - locked_amounts: Option<[U512; LOCKED_AMOUNTS_MAX_LENGTH]>, -} - -fn vesting_schedule_period_to_weeks(vesting_schedule_period_millis: u64) -> usize { - debug_assert_ne!(DAY_MILLIS, 0); - debug_assert_ne!(DAYS_IN_WEEK, 0); - vesting_schedule_period_millis as usize / DAY_MILLIS / DAYS_IN_WEEK -} - -impl VestingSchedule { - pub fn new(initial_release_timestamp_millis: u64) -> Self { - let locked_amounts = None; - VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - } - } - - /// Initializes vesting schedule with a configured amount of weekly releases. - /// - /// Returns `false` if already initialized. - /// - /// # Panics - /// - /// Panics if `vesting_schedule_period_millis` represents more than 13 weeks. - pub fn initialize_with_schedule( - &mut self, - staked_amount: U512, - vesting_schedule_period_millis: u64, - ) -> bool { - if self.locked_amounts.is_some() { - return false; - } - - let locked_amounts_length = - vesting_schedule_period_to_weeks(vesting_schedule_period_millis); - - assert!( - locked_amounts_length < LOCKED_AMOUNTS_MAX_LENGTH, - "vesting schedule period must be less than {} weeks", - LOCKED_AMOUNTS_MAX_LENGTH, - ); - - if locked_amounts_length == 0 || vesting_schedule_period_millis == 0 { - // Zero weeks means instant unlock of staked amount. - self.locked_amounts = Some(Default::default()); - return true; - } - - let release_period: U512 = U512::from(locked_amounts_length + 1); - let weekly_release = staked_amount / release_period; - - let mut locked_amounts = [U512::zero(); LOCKED_AMOUNTS_MAX_LENGTH]; - let mut remaining_locked = staked_amount; - - for locked_amount in locked_amounts.iter_mut().take(locked_amounts_length) { - remaining_locked -= weekly_release; - *locked_amount = remaining_locked; - } - - assert_eq!( - locked_amounts.get(locked_amounts_length), - Some(&U512::zero()), - "first element after the schedule should be zero" - ); - - self.locked_amounts = Some(locked_amounts); - true - } - - /// Initializes weekly release for a fixed amount of 14 weeks period. - /// - /// Returns `false` if already initialized. - pub fn initialize(&mut self, staked_amount: U512) -> bool { - self.initialize_with_schedule(staked_amount, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - pub fn initial_release_timestamp_millis(&self) -> u64 { - self.initial_release_timestamp_millis - } - - pub fn locked_amounts(&self) -> Option<&[U512]> { - let locked_amounts = self.locked_amounts.as_ref()?; - Some(locked_amounts.as_slice()) - } - - pub fn locked_amount(&self, timestamp_millis: u64) -> Option { - let locked_amounts = self.locked_amounts()?; - - let index = { - let index_timestamp = - timestamp_millis.checked_sub(self.initial_release_timestamp_millis)?; - (index_timestamp as usize).checked_div(WEEK_MILLIS)? 
- }; - - let locked_amount = locked_amounts.get(index).cloned().unwrap_or_default(); - - Some(locked_amount) - } - - /// Checks if this vesting schedule is still under the vesting - pub(crate) fn is_vesting( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - let vested_period = match self.locked_amounts() { - Some(locked_amounts) => { - let vesting_weeks = locked_amounts - .iter() - .position(|amount| amount.is_zero()) - .expect("vesting schedule should always have zero at the end"); // SAFETY: at least one zero is guaranteed by `initialize_with_schedule` method - - let vesting_weeks_millis = - (vesting_weeks as u64).saturating_mul(WEEK_MILLIS as u64); - - self.initial_release_timestamp_millis() - .saturating_add(vesting_weeks_millis) - } - None => { - // Uninitialized yet but we know this will be the configured period of time. - self.initial_release_timestamp_millis() - .saturating_add(vesting_schedule_period_millis) - } - }; - - timestamp_millis < vested_period - } -} - -impl ToBytes for [U512; LOCKED_AMOUNTS_MAX_LENGTH] { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.iter().map(ToBytes::serialized_length).sum::() - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - for amount in self { - amount.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for [U512; LOCKED_AMOUNTS_MAX_LENGTH] { - fn from_bytes(mut bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [U512::zero(); LOCKED_AMOUNTS_MAX_LENGTH]; - for value in &mut result { - let (amount, rem) = FromBytes::from_bytes(bytes)?; - *value = amount; - bytes = rem; - } - Ok((result, bytes)) - } -} - -impl ToBytes for VestingSchedule { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.initial_release_timestamp_millis.to_bytes()?); - result.append(&mut self.locked_amounts.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.initial_release_timestamp_millis.serialized_length() - + self.locked_amounts.serialized_length() - } -} - -impl FromBytes for VestingSchedule { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (initial_release_timestamp_millis, bytes) = FromBytes::from_bytes(bytes)?; - let (locked_amounts, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - }, - bytes, - )) - } -} - -/// Generators for [`VestingSchedule`] -#[cfg(test)] -mod gens { - use proptest::{ - array, option, - prelude::{Arbitrary, Strategy}, - }; - - use super::VestingSchedule; - use crate::gens::u512_arb; - - pub fn vesting_schedule_arb() -> impl Strategy { - (::arbitrary(), option::of(array::uniform14(u512_arb()))).prop_map( - |(initial_release_timestamp_millis, locked_amounts)| VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - }, - ) - } -} - -#[cfg(test)] -mod tests { - use proptest::{prop_assert, proptest}; - - use crate::{ - bytesrepr, - gens::u512_arb, - system::auction::bid::{ - vesting::{gens::vesting_schedule_arb, vesting_schedule_period_to_weeks, WEEK_MILLIS}, - VestingSchedule, - }, - U512, - }; - - use super::*; - - /// Default lock-in period of 90 days - const DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS: u64 = 90 * DAY_MILLIS as u64; - const RELEASE_TIMESTAMP: u64 = 
DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS; - const STAKE: u64 = 140; - - const DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS: u64 = 91 * DAY_MILLIS as u64; - const LOCKED_AMOUNTS_LENGTH: usize = - (DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS as usize / WEEK_MILLIS) + 1; - - #[test] - #[should_panic = "vesting schedule period must be less than"] - fn test_vesting_schedule_exceeding_the_maximum_should_not_panic() { - let future_date = 98 * DAY_MILLIS as u64; - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize_with_schedule(U512::from(STAKE), future_date); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_amount_check_should_not_panic() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize(U512::from(STAKE)); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_with_zero_length_schedule_should_not_panic() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize_with_schedule(U512::from(STAKE), 0); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_amount() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize(U512::from(STAKE)); - - let mut timestamp = RELEASE_TIMESTAMP; - - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(130)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64 - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(130)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64 + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(100)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(100)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(20)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as 
u64 * 13) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 13); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 13) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - } - - fn vested_amounts_match_initial_stake( - initial_stake: U512, - release_timestamp: u64, - vesting_schedule_length: u64, - ) -> bool { - let mut vesting_schedule = VestingSchedule::new(release_timestamp); - vesting_schedule.initialize_with_schedule(initial_stake, vesting_schedule_length); - - let mut total_vested_amounts = U512::zero(); - - for i in 0..LOCKED_AMOUNTS_LENGTH { - let timestamp = release_timestamp + (WEEK_MILLIS * i) as u64; - if let Some(locked_amount) = vesting_schedule.locked_amount(timestamp) { - let current_vested_amount = initial_stake - locked_amount - total_vested_amounts; - total_vested_amounts += current_vested_amount - } - } - - total_vested_amounts == initial_stake - } - - #[test] - fn vested_amounts_conserve_stake() { - let stake = U512::from(1000); - assert!(vested_amounts_match_initial_stake( - stake, - DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - )) - } - - #[test] - fn is_vesting_with_default_schedule() { - let initial_stake = U512::from(1000u64); - let release_timestamp = DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS; - let mut vesting_schedule = VestingSchedule::new(release_timestamp); - - let is_vesting_before: Vec = (0..LOCKED_AMOUNTS_LENGTH + 1) - .map(|i| { - vesting_schedule.is_vesting( - release_timestamp + (WEEK_MILLIS * i) as u64, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - ) - }) - .collect(); - - assert_eq!( - is_vesting_before, - vec![ - true, true, true, true, true, true, true, true, true, true, true, true, true, - false, // week after is always set to zero - false - ] - ); - vesting_schedule.initialize(initial_stake); - - let is_vesting_after: Vec = (0..LOCKED_AMOUNTS_LENGTH + 1) - .map(|i| { - vesting_schedule.is_vesting( - release_timestamp + (WEEK_MILLIS * i) as u64, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - ) - }) - .collect(); - - assert_eq!( - is_vesting_after, - vec![ - true, true, true, true, true, true, true, true, true, true, true, true, true, - false, // week after is always set to zero - false, - ] - ); - } - - #[test] - fn should_calculate_vesting_schedule_period_to_weeks() { - let thirteen_weeks_millis = 13 * 7 * DAY_MILLIS as u64; - assert_eq!(vesting_schedule_period_to_weeks(thirteen_weeks_millis), 13,); - - assert_eq!(vesting_schedule_period_to_weeks(0), 0); - assert_eq!( - vesting_schedule_period_to_weeks(u64::MAX), - 30_500_568_904usize - ); - } - - proptest! 
{ - #[test] - fn prop_total_vested_amounts_conserve_stake(stake in u512_arb()) { - prop_assert!(vested_amounts_match_initial_stake( - stake, - DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - )) - } - - #[test] - fn prop_serialization_roundtrip(vesting_schedule in vesting_schedule_arb()) { - bytesrepr::test_serialization_roundtrip(&vesting_schedule) - } - } -} diff --git a/casper_types/src/system/auction/constants.rs b/casper_types/src/system/auction/constants.rs deleted file mode 100644 index e54e1f4d..00000000 --- a/casper_types/src/system/auction/constants.rs +++ /dev/null @@ -1,98 +0,0 @@ -use crate::EraId; - -use super::DelegationRate; - -/// Initial value of era id we start at genesis. -pub const INITIAL_ERA_ID: EraId = EraId::new(0); - -/// Initial value of era end timestamp. -pub const INITIAL_ERA_END_TIMESTAMP_MILLIS: u64 = 0; - -/// Delegation rate is a fraction between 0-1. Validator sets the delegation rate -/// in integer terms, which is then divided by the denominator to obtain the fraction. -pub const DELEGATION_RATE_DENOMINATOR: DelegationRate = 100; - -/// We use one trillion as a block reward unit because it's large enough to allow precise -/// fractions, and small enough for many block rewards to fit into a u64. -pub const BLOCK_REWARD: u64 = 1_000_000_000_000; - -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; -/// Named constant for `delegation_rate`. -pub const ARG_DELEGATION_RATE: &str = "delegation_rate"; -/// Named constant for `account_hash`. -pub const ARG_PUBLIC_KEY: &str = "public_key"; -/// Named constant for `validator`. -pub const ARG_VALIDATOR: &str = "validator"; -/// Named constant for `delegator`. -pub const ARG_DELEGATOR: &str = "delegator"; -/// Named constant for `validator_purse`. -pub const ARG_VALIDATOR_PURSE: &str = "validator_purse"; -/// Named constant for `validator_keys`. -pub const ARG_VALIDATOR_KEYS: &str = "validator_keys"; -/// Named constant for `validator_public_keys`. -pub const ARG_VALIDATOR_PUBLIC_KEYS: &str = "validator_public_keys"; -/// Named constant for `new_validator`. -pub const ARG_NEW_VALIDATOR: &str = "new_validator"; -/// Named constant for `era_id`. -pub const ARG_ERA_ID: &str = "era_id"; -/// Named constant for `reward_factors`. -pub const ARG_REWARD_FACTORS: &str = "reward_factors"; -/// Named constant for `validator_public_key`. -pub const ARG_VALIDATOR_PUBLIC_KEY: &str = "validator_public_key"; -/// Named constant for `delegator_public_key`. -pub const ARG_DELEGATOR_PUBLIC_KEY: &str = "delegator_public_key"; -/// Named constant for `validator_slots` argument. -pub const ARG_VALIDATOR_SLOTS: &str = VALIDATOR_SLOTS_KEY; -/// Named constant for `mint_contract_package_hash` -pub const ARG_MINT_CONTRACT_PACKAGE_HASH: &str = "mint_contract_package_hash"; -/// Named constant for `genesis_validators` -pub const ARG_GENESIS_VALIDATORS: &str = "genesis_validators"; -/// Named constant of `auction_delay` -pub const ARG_AUCTION_DELAY: &str = "auction_delay"; -/// Named constant for `locked_funds_period` -pub const ARG_LOCKED_FUNDS_PERIOD: &str = "locked_funds_period"; -/// Named constant for `unbonding_delay` -pub const ARG_UNBONDING_DELAY: &str = "unbonding_delay"; -/// Named constant for `era_end_timestamp_millis`; -pub const ARG_ERA_END_TIMESTAMP_MILLIS: &str = "era_end_timestamp_millis"; -/// Named constant for `evicted_validators`; -pub const ARG_EVICTED_VALIDATORS: &str = "evicted_validators"; - -/// Named constant for method `get_era_validators`. 
-pub const METHOD_GET_ERA_VALIDATORS: &str = "get_era_validators"; -/// Named constant for method `add_bid`. -pub const METHOD_ADD_BID: &str = "add_bid"; -/// Named constant for method `withdraw_bid`. -pub const METHOD_WITHDRAW_BID: &str = "withdraw_bid"; -/// Named constant for method `delegate`. -pub const METHOD_DELEGATE: &str = "delegate"; -/// Named constant for method `undelegate`. -pub const METHOD_UNDELEGATE: &str = "undelegate"; -/// Named constant for method `redelegate`. -pub const METHOD_REDELEGATE: &str = "redelegate"; -/// Named constant for method `run_auction`. -pub const METHOD_RUN_AUCTION: &str = "run_auction"; -/// Named constant for method `slash`. -pub const METHOD_SLASH: &str = "slash"; -/// Named constant for method `distribute`. -pub const METHOD_DISTRIBUTE: &str = "distribute"; -/// Named constant for method `read_era_id`. -pub const METHOD_READ_ERA_ID: &str = "read_era_id"; -/// Named constant for method `activate_bid`. -pub const METHOD_ACTIVATE_BID: &str = "activate_bid"; - -/// Storage for `EraId`. -pub const ERA_ID_KEY: &str = "era_id"; -/// Storage for era-end timestamp. -pub const ERA_END_TIMESTAMP_MILLIS_KEY: &str = "era_end_timestamp_millis"; -/// Storage for `SeigniorageRecipientsSnapshot`. -pub const SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY: &str = "seigniorage_recipients_snapshot"; -/// Total validator slots allowed. -pub const VALIDATOR_SLOTS_KEY: &str = "validator_slots"; -/// Amount of auction delay. -pub const AUCTION_DELAY_KEY: &str = "auction_delay"; -/// Default lock period for new bid entries represented in eras. -pub const LOCKED_FUNDS_PERIOD_KEY: &str = "locked_funds_period"; -/// Unbonding delay expressed in eras. -pub const UNBONDING_DELAY_KEY: &str = "unbonding_delay"; diff --git a/casper_types/src/system/auction/delegator.rs b/casper_types/src/system/auction/delegator.rs deleted file mode 100644 index 7834e42b..00000000 --- a/casper_types/src/system/auction/delegator.rs +++ /dev/null @@ -1,242 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{bid::VestingSchedule, Error}, - CLType, CLTyped, PublicKey, URef, U512, -}; - -/// Represents a party delegating their stake to a validator (or "delegatee") -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Delegator { - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - vesting_schedule: Option, -} - -impl Delegator { - /// Creates a new [`Delegator`] - pub fn unlocked( - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - ) -> Self { - let vesting_schedule = None; - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - } - } - - /// Creates new instance of a [`Delegator`] with locked funds. 
- pub fn locked( - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - release_timestamp_millis: u64, - ) -> Self { - let vesting_schedule = Some(VestingSchedule::new(release_timestamp_millis)); - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - } - } - - /// Returns public key of the delegator. - pub fn delegator_public_key(&self) -> &PublicKey { - &self.delegator_public_key - } - - /// Returns the staked amount - pub fn staked_amount(&self) -> &U512 { - &self.staked_amount - } - - /// Returns the mutable staked amount - pub fn staked_amount_mut(&mut self) -> &mut U512 { - &mut self.staked_amount - } - - /// Returns the bonding purse - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns delegatee - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Decreases the stake of the provided bid - pub fn decrease_stake( - &mut self, - amount: U512, - era_end_timestamp_millis: u64, - ) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_sub(amount) - .ok_or(Error::InvalidAmount)?; - - let vesting_schedule = match self.vesting_schedule.as_ref() { - Some(vesting_schedule) => vesting_schedule, - None => { - self.staked_amount = updated_staked_amount; - return Ok(updated_staked_amount); - } - }; - - match vesting_schedule.locked_amount(era_end_timestamp_millis) { - Some(locked_amount) if updated_staked_amount < locked_amount => { - Err(Error::DelegatorFundsLocked) - } - None => { - // If `None`, then the locked amounts table has yet to be initialized (likely - // pre-90 day mark) - Err(Error::DelegatorFundsLocked) - } - Some(_) => { - self.staked_amount = updated_staked_amount; - Ok(updated_staked_amount) - } - } - } - - /// Increases the stake of the provided bid - pub fn increase_stake(&mut self, amount: U512) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_add(amount) - .ok_or(Error::InvalidAmount)?; - - self.staked_amount = updated_staked_amount; - - Ok(updated_staked_amount) - } - - /// Returns a reference to the vesting schedule of the provided - /// delegator bid. `None` if a non-genesis validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - self.vesting_schedule.as_ref() - } - - /// Returns a mutable reference to the vesting schedule of the provided - /// delegator bid. `None` if a non-genesis validator. 
- pub fn vesting_schedule_mut(&mut self) -> Option<&mut VestingSchedule> { - self.vesting_schedule.as_mut() - } -} - -impl CLTyped for Delegator { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for Delegator { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.delegator_public_key.to_bytes()?); - buffer.extend(self.staked_amount.to_bytes()?); - buffer.extend(self.bonding_purse.to_bytes()?); - buffer.extend(self.validator_public_key.to_bytes()?); - buffer.extend(self.vesting_schedule.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.delegator_public_key.serialized_length() - + self.staked_amount.serialized_length() - + self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.vesting_schedule.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.delegator_public_key.write_bytes(writer)?; - self.staked_amount.write_bytes(writer)?; - self.bonding_purse.write_bytes(writer)?; - self.validator_public_key.write_bytes(writer)?; - self.vesting_schedule.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Delegator { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (delegator_public_key, bytes) = PublicKey::from_bytes(bytes)?; - let (staked_amount, bytes) = U512::from_bytes(bytes)?; - let (bonding_purse, bytes) = URef::from_bytes(bytes)?; - let (validator_public_key, bytes) = PublicKey::from_bytes(bytes)?; - let (vesting_schedule, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - }, - bytes, - )) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - bytesrepr, system::auction::Delegator, AccessRights, PublicKey, SecretKey, URef, U512, - }; - - #[test] - fn serialization_roundtrip() { - let staked_amount = U512::one(); - let bonding_purse = URef::new([42; 32], AccessRights::READ_ADD_WRITE); - let delegator_public_key: PublicKey = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - - let validator_public_key: PublicKey = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let unlocked_delegator = Delegator::unlocked( - delegator_public_key.clone(), - staked_amount, - bonding_purse, - validator_public_key.clone(), - ); - bytesrepr::test_serialization_roundtrip(&unlocked_delegator); - - let release_timestamp_millis = 42; - let locked_delegator = Delegator::locked( - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - release_timestamp_millis, - ); - bytesrepr::test_serialization_roundtrip(&locked_delegator); - } -} diff --git a/casper_types/src/system/auction/entry_points.rs b/casper_types/src/system/auction/entry_points.rs deleted file mode 100644 index 69915711..00000000 --- a/casper_types/src/system/auction/entry_points.rs +++ /dev/null @@ -1,146 +0,0 @@ -use alloc::boxed::Box; - -use crate::{ - system::auction::{ - DelegationRate, ValidatorWeights, ARG_AMOUNT, ARG_DELEGATION_RATE, ARG_DELEGATOR, - ARG_ERA_END_TIMESTAMP_MILLIS, ARG_NEW_VALIDATOR, ARG_PUBLIC_KEY, ARG_REWARD_FACTORS, - ARG_VALIDATOR, ARG_VALIDATOR_PUBLIC_KEY, METHOD_ACTIVATE_BID, METHOD_ADD_BID, - METHOD_DELEGATE, METHOD_DISTRIBUTE, METHOD_GET_ERA_VALIDATORS, METHOD_READ_ERA_ID, - METHOD_REDELEGATE, METHOD_RUN_AUCTION, METHOD_SLASH, 
METHOD_UNDELEGATE, - METHOD_WITHDRAW_BID, - }, - CLType, CLTyped, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, - PublicKey, U512, -}; - -/// Creates auction contract entry points. -pub fn auction_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_GET_ERA_VALIDATORS, - vec![], - Option::::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_ADD_BID, - vec![ - Parameter::new(ARG_PUBLIC_KEY, PublicKey::cl_type()), - Parameter::new(ARG_DELEGATION_RATE, DelegationRate::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_WITHDRAW_BID, - vec![ - Parameter::new(ARG_PUBLIC_KEY, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_DELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_UNDELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_REDELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - Parameter::new(ARG_NEW_VALIDATOR, PublicKey::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_RUN_AUCTION, - vec![Parameter::new(ARG_ERA_END_TIMESTAMP_MILLIS, u64::cl_type())], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_SLASH, - vec![], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_DISTRIBUTE, - vec![Parameter::new( - ARG_REWARD_FACTORS, - CLType::Map { - key: Box::new(CLType::PublicKey), - value: Box::new(CLType::U64), - }, - )], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_READ_ERA_ID, - vec![], - CLType::U64, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_ACTIVATE_BID, - vec![Parameter::new(ARG_VALIDATOR_PUBLIC_KEY, CLType::PublicKey)], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - entry_points -} diff --git 
a/casper_types/src/system/auction/era_info.rs b/casper_types/src/system/auction/era_info.rs deleted file mode 100644 index ea69dd16..00000000 --- a/casper_types/src/system/auction/era_info.rs +++ /dev/null @@ -1,314 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{boxed::Box, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, PublicKey, U512, -}; - -const SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG: u8 = 0; -const SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG: u8 = 1; - -/// Information about a seigniorage allocation -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum SeigniorageAllocation { - /// Info about a seigniorage allocation for a validator - Validator { - /// Validator's public key - validator_public_key: PublicKey, - /// Allocated amount - amount: U512, - }, - /// Info about a seigniorage allocation for a delegator - Delegator { - /// Delegator's public key - delegator_public_key: PublicKey, - /// Validator's public key - validator_public_key: PublicKey, - /// Allocated amount - amount: U512, - }, -} - -impl SeigniorageAllocation { - /// Constructs a [`SeigniorageAllocation::Validator`] - pub const fn validator(validator_public_key: PublicKey, amount: U512) -> Self { - SeigniorageAllocation::Validator { - validator_public_key, - amount, - } - } - - /// Constructs a [`SeigniorageAllocation::Delegator`] - pub const fn delegator( - delegator_public_key: PublicKey, - validator_public_key: PublicKey, - amount: U512, - ) -> Self { - SeigniorageAllocation::Delegator { - delegator_public_key, - validator_public_key, - amount, - } - } - - /// Returns the amount for a given seigniorage allocation - pub fn amount(&self) -> &U512 { - match self { - SeigniorageAllocation::Validator { amount, .. } => amount, - SeigniorageAllocation::Delegator { amount, .. } => amount, - } - } - - fn tag(&self) -> u8 { - match self { - SeigniorageAllocation::Validator { .. } => SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG, - SeigniorageAllocation::Delegator { .. 
} => SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG,
-        }
-    }
-}
-
-impl ToBytes for SeigniorageAllocation {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut buffer = bytesrepr::allocate_buffer(self)?;
-        self.write_bytes(&mut buffer)?;
-        Ok(buffer)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.tag().serialized_length()
-            + match self {
-                SeigniorageAllocation::Validator {
-                    validator_public_key,
-                    amount,
-                } => validator_public_key.serialized_length() + amount.serialized_length(),
-                SeigniorageAllocation::Delegator {
-                    delegator_public_key,
-                    validator_public_key,
-                    amount,
-                } => {
-                    delegator_public_key.serialized_length()
-                        + validator_public_key.serialized_length()
-                        + amount.serialized_length()
-                }
-            }
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {
-        writer.push(self.tag());
-        match self {
-            SeigniorageAllocation::Validator {
-                validator_public_key,
-                amount,
-            } => {
-                validator_public_key.write_bytes(writer)?;
-                amount.write_bytes(writer)?;
-            }
-            SeigniorageAllocation::Delegator {
-                delegator_public_key,
-                validator_public_key,
-                amount,
-            } => {
-                delegator_public_key.write_bytes(writer)?;
-                validator_public_key.write_bytes(writer)?;
-                amount.write_bytes(writer)?;
-            }
-        }
-        Ok(())
-    }
-}
-
-impl FromBytes for SeigniorageAllocation {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (tag, rem) = <u8 as FromBytes>::from_bytes(bytes)?;
-        match tag {
-            SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG => {
-                let (validator_public_key, rem) = PublicKey::from_bytes(rem)?;
-                let (amount, rem) = U512::from_bytes(rem)?;
-                Ok((
-                    SeigniorageAllocation::validator(validator_public_key, amount),
-                    rem,
-                ))
-            }
-            SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG => {
-                let (delegator_public_key, rem) = PublicKey::from_bytes(rem)?;
-                let (validator_public_key, rem) = PublicKey::from_bytes(rem)?;
-                let (amount, rem) = U512::from_bytes(rem)?;
-                Ok((
-                    SeigniorageAllocation::delegator(
-                        delegator_public_key,
-                        validator_public_key,
-                        amount,
-                    ),
-                    rem,
-                ))
-            }
-            _ => Err(bytesrepr::Error::Formatting),
-        }
-    }
-}
-
-impl CLTyped for SeigniorageAllocation {
-    fn cl_type() -> CLType {
-        CLType::Any
-    }
-}
-
-/// Auction metadata. Intended to be recorded at each era.
-#[derive(Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)]
-#[cfg_attr(feature = "datasize", derive(DataSize))]
-#[cfg_attr(feature = "json-schema", derive(JsonSchema))]
-#[serde(deny_unknown_fields)]
-pub struct EraInfo {
-    seigniorage_allocations: Vec<SeigniorageAllocation>,
-}
-
-impl EraInfo {
-    /// Constructs a [`EraInfo`].
-    pub fn new() -> Self {
-        let seigniorage_allocations = Vec::new();
-        EraInfo {
-            seigniorage_allocations,
-        }
-    }
-
-    /// Returns a reference to the seigniorage allocations collection
-    pub fn seigniorage_allocations(&self) -> &Vec<SeigniorageAllocation> {
-        &self.seigniorage_allocations
-    }
-
-    /// Returns a mutable reference to the seigniorage allocations collection
-    pub fn seigniorage_allocations_mut(&mut self) -> &mut Vec<SeigniorageAllocation> {
-        &mut self.seigniorage_allocations
-    }
-
-    /// Returns all seigniorage allocations that match the provided public key
-    /// using the following criteria:
-    /// * If the match candidate is a validator allocation, the provided public key is matched
-    ///   against the validator public key.
-    /// * If the match candidate is a delegator allocation, the provided public key is matched
-    ///   against the delegator public key.
- pub fn select(&self, public_key: PublicKey) -> impl Iterator { - self.seigniorage_allocations - .iter() - .filter(move |allocation| match allocation { - SeigniorageAllocation::Validator { - validator_public_key, - .. - } => public_key == *validator_public_key, - SeigniorageAllocation::Delegator { - delegator_public_key, - .. - } => public_key == *delegator_public_key, - }) - } -} - -impl ToBytes for EraInfo { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.seigniorage_allocations().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.seigniorage_allocations.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.seigniorage_allocations().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for EraInfo { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (seigniorage_allocations, rem) = Vec::::from_bytes(bytes)?; - Ok(( - EraInfo { - seigniorage_allocations, - }, - rem, - )) - } -} - -impl CLTyped for EraInfo { - fn cl_type() -> CLType { - CLType::List(Box::new(SeigniorageAllocation::cl_type())) - } -} - -/// Generators for [`SeigniorageAllocation`] and [`EraInfo`] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::{ - collection::{self, SizeRange}, - prelude::Strategy, - prop_oneof, - }; - - use crate::{ - crypto::gens::public_key_arb, - gens::u512_arb, - system::auction::{EraInfo, SeigniorageAllocation}, - }; - - fn seigniorage_allocation_validator_arb() -> impl Strategy { - (public_key_arb(), u512_arb()).prop_map(|(validator_public_key, amount)| { - SeigniorageAllocation::validator(validator_public_key, amount) - }) - } - - fn seigniorage_allocation_delegator_arb() -> impl Strategy { - (public_key_arb(), public_key_arb(), u512_arb()).prop_map( - |(delegator_public_key, validator_public_key, amount)| { - SeigniorageAllocation::delegator(delegator_public_key, validator_public_key, amount) - }, - ) - } - - /// Creates an arbitrary [`SeignorageAllocation`](crate::system::auction::SeigniorageAllocation) - pub fn seigniorage_allocation_arb() -> impl Strategy { - prop_oneof![ - seigniorage_allocation_validator_arb(), - seigniorage_allocation_delegator_arb() - ] - } - - /// Creates an arbitrary [`EraInfo`] - pub fn era_info_arb(size: impl Into) -> impl Strategy { - collection::vec(seigniorage_allocation_arb(), size).prop_map(|allocations| { - let mut era_info = EraInfo::new(); - *era_info.seigniorage_allocations_mut() = allocations; - era_info - }) - } -} - -#[cfg(test)] -mod tests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::gens; - - proptest! { - #[test] - fn test_serialization_roundtrip(era_info in gens::era_info_arb(0..32)) { - bytesrepr::test_serialization_roundtrip(&era_info) - } - } -} diff --git a/casper_types/src/system/auction/error.rs b/casper_types/src/system/auction/error.rs deleted file mode 100644 index 00bd1741..00000000 --- a/casper_types/src/system/auction/error.rs +++ /dev/null @@ -1,543 +0,0 @@ -//! Home of the Auction contract's [`enum@Error`] type. -use alloc::vec::Vec; -use core::{ - convert::{TryFrom, TryInto}, - fmt::{self, Display, Formatter}, - result, -}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// Errors which can occur while executing the Auction contract. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(test, derive(strum::EnumIter))] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Unable to find named key in the contract's named keys. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(0, Error::MissingKey as u8); - /// ``` - MissingKey = 0, - /// Given named key contains invalid variant. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(1, Error::InvalidKeyVariant as u8); - /// ``` - InvalidKeyVariant = 1, - /// Value under an uref does not exist. This means the installer contract didn't work properly. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(2, Error::MissingValue as u8); - /// ``` - MissingValue = 2, - /// ABI serialization issue while reading or writing. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(3, Error::Serialization as u8); - /// ``` - Serialization = 3, - /// Triggered when contract was unable to transfer desired amount of tokens into a bid purse. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(4, Error::TransferToBidPurse as u8); - /// ``` - TransferToBidPurse = 4, - /// User passed invalid amount of tokens which might result in wrong values after calculation. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(5, Error::InvalidAmount as u8); - /// ``` - InvalidAmount = 5, - /// Unable to find a bid by account hash in `active_bids` map. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(6, Error::BidNotFound as u8); - /// ``` - BidNotFound = 6, - /// Validator's account hash was not found in the map. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(7, Error::ValidatorNotFound as u8); - /// ``` - ValidatorNotFound = 7, - /// Delegator's account hash was not found in the map. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(8, Error::DelegatorNotFound as u8); - /// ``` - DelegatorNotFound = 8, - /// Storage problem. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(9, Error::Storage as u8); - /// ``` - Storage = 9, - /// Raised when system is unable to bond. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(10, Error::Bonding as u8); - /// ``` - Bonding = 10, - /// Raised when system is unable to unbond. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(11, Error::Unbonding as u8); - /// ``` - Unbonding = 11, - /// Raised when Mint contract is unable to release founder stake. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(12, Error::ReleaseFounderStake as u8); - /// ``` - ReleaseFounderStake = 12, - /// Raised when the system is unable to determine purse balance. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(13, Error::GetBalance as u8); - /// ``` - GetBalance = 13, - /// Raised when an entry point is called from invalid account context. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(14, Error::InvalidContext as u8); - /// ``` - InvalidContext = 14, - /// Raised whenever a validator's funds are still locked in but an attempt to withdraw was - /// made. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(15, Error::ValidatorFundsLocked as u8); - /// ``` - ValidatorFundsLocked = 15, - /// Raised when caller is not the system account. 
- /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(16, Error::InvalidCaller as u8); - /// ``` - InvalidCaller = 16, - /// Raised when function is supplied a public key that does match the caller's or does not have - /// an associated account. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(17, Error::InvalidPublicKey as u8); - /// ``` - InvalidPublicKey = 17, - /// Validator is not not bonded. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(18, Error::BondNotFound as u8); - /// ``` - BondNotFound = 18, - /// Unable to create purse. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(19, Error::CreatePurseFailed as u8); - /// ``` - CreatePurseFailed = 19, - /// Attempted to unbond an amount which was too large. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(20, Error::UnbondTooLarge as u8); - /// ``` - UnbondTooLarge = 20, - /// Attempted to bond with a stake which was too small. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(21, Error::BondTooSmall as u8); - /// ``` - BondTooSmall = 21, - /// Raised when rewards are to be distributed to delegators, but the validator has no - /// delegations. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(22, Error::MissingDelegations as u8); - /// ``` - MissingDelegations = 22, - /// The validators returned by the consensus component should match - /// current era validators when distributing rewards. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(23, Error::MismatchedEraValidators as u8); - /// ``` - MismatchedEraValidators = 23, - /// Failed to mint reward tokens. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(24, Error::MintReward as u8); - /// ``` - MintReward = 24, - /// Invalid number of validator slots. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(25, Error::InvalidValidatorSlotsValue as u8); - /// ``` - InvalidValidatorSlotsValue = 25, - /// Failed to reduce total supply. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(26, Error::MintReduceTotalSupply as u8); - /// ``` - MintReduceTotalSupply = 26, - /// Triggered when contract was unable to transfer desired amount of tokens into a delegators - /// purse. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(27, Error::TransferToDelegatorPurse as u8); - /// ``` - TransferToDelegatorPurse = 27, - /// Triggered when contract was unable to perform a transfer to distribute validators reward. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(28, Error::ValidatorRewardTransfer as u8); - /// ``` - ValidatorRewardTransfer = 28, - /// Triggered when contract was unable to perform a transfer to distribute delegators rewards. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(29, Error::DelegatorRewardTransfer as u8); - /// ``` - DelegatorRewardTransfer = 29, - /// Failed to transfer desired amount while withdrawing delegators reward. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(30, Error::WithdrawDelegatorReward as u8); - /// ``` - WithdrawDelegatorReward = 30, - /// Failed to transfer desired amount while withdrawing validators reward. 
- /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(31, Error::WithdrawValidatorReward as u8); - /// ``` - WithdrawValidatorReward = 31, - /// Failed to transfer desired amount into unbonding purse. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(32, Error::TransferToUnbondingPurse as u8); - /// ``` - TransferToUnbondingPurse = 32, - /// Failed to record era info. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(33, Error::RecordEraInfo as u8); - /// ``` - RecordEraInfo = 33, - /// Failed to create a [`crate::CLValue`]. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(34, Error::CLValue as u8); - /// ``` - CLValue = 34, - /// Missing seigniorage recipients for given era. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(35, Error::MissingSeigniorageRecipients as u8); - /// ``` - MissingSeigniorageRecipients = 35, - /// Failed to transfer funds. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(36, Error::Transfer as u8); - /// ``` - Transfer = 36, - /// Delegation rate exceeds rate. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(37, Error::DelegationRateTooLarge as u8); - /// ``` - DelegationRateTooLarge = 37, - /// Raised whenever a delegator's funds are still locked in but an attempt to undelegate was - /// made. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(38, Error::DelegatorFundsLocked as u8); - /// ``` - DelegatorFundsLocked = 38, - /// An arithmetic overflow has occurred. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(39, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 39, - /// Execution exceeded the gas limit. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(40, Error::GasLimit as u8); - /// ``` - GasLimit = 40, - /// Too many frames on the runtime stack. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(41, Error::RuntimeStackOverflow as u8); - /// ``` - RuntimeStackOverflow = 41, - /// An error that is raised when there is an error in the mint contract that cannot - /// be mapped to a specific auction error. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(42, Error::MintError as u8); - /// ``` - MintError = 42, - /// The validator has exceeded the maximum amount of delegators allowed. - /// NOTE: This variant is no longer in use. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(43, Error::ExceededDelegatorSizeLimit as u8); - /// ``` - ExceededDelegatorSizeLimit = 43, - /// The global delegator capacity for the auction has been reached. - /// NOTE: This variant is no longer in use. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(44, Error::GlobalDelegatorCapacityReached as u8); - /// ``` - GlobalDelegatorCapacityReached = 44, - /// The delegated amount is below the minimum allowed. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(45, Error::DelegationAmountTooSmall as u8); - /// ``` - DelegationAmountTooSmall = 45, - /// Runtime stack error. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(46, Error::RuntimeStack as u8); - /// ``` - RuntimeStack = 46, - /// An error that is raised on private chain only when a `disable_auction_bids` flag is set to - /// `true`. 
- /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(47, Error::AuctionBidsDisabled as u8); - /// ``` - AuctionBidsDisabled = 47, - /// Error getting accumulation purse. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(48, Error::GetAccumulationPurse as u8); - /// ``` - GetAccumulationPurse = 48, - /// Failed to transfer desired amount into administrators account. - /// ``` - /// # use casper_types::system::auction::Error; - /// assert_eq!(49, Error::TransferToAdministrator as u8); - /// ``` - TransferToAdministrator = 49, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::MissingKey => formatter.write_str("Missing key"), - Error::InvalidKeyVariant => formatter.write_str("Invalid key variant"), - Error::MissingValue => formatter.write_str("Missing value"), - Error::Serialization => formatter.write_str("Serialization error"), - Error::TransferToBidPurse => formatter.write_str("Transfer to bid purse error"), - Error::InvalidAmount => formatter.write_str("Invalid amount"), - Error::BidNotFound => formatter.write_str("Bid not found"), - Error::ValidatorNotFound => formatter.write_str("Validator not found"), - Error::DelegatorNotFound => formatter.write_str("Delegator not found"), - Error::Storage => formatter.write_str("Storage error"), - Error::Bonding => formatter.write_str("Bonding error"), - Error::Unbonding => formatter.write_str("Unbonding error"), - Error::ReleaseFounderStake => formatter.write_str("Unable to release founder stake"), - Error::GetBalance => formatter.write_str("Unable to get purse balance"), - Error::InvalidContext => formatter.write_str("Invalid context"), - Error::ValidatorFundsLocked => formatter.write_str("Validator's funds are locked"), - Error::InvalidCaller => formatter.write_str("Function must be called by system account"), - Error::InvalidPublicKey => formatter.write_str("Supplied public key does not match caller's public key or has no associated account"), - Error::BondNotFound => formatter.write_str("Validator's bond not found"), - Error::CreatePurseFailed => formatter.write_str("Unable to create purse"), - Error::UnbondTooLarge => formatter.write_str("Unbond is too large"), - Error::BondTooSmall => formatter.write_str("Bond is too small"), - Error::MissingDelegations => formatter.write_str("Validators has not received any delegations"), - Error::MismatchedEraValidators => formatter.write_str("Mismatched era validator sets to distribute rewards"), - Error::MintReward => formatter.write_str("Failed to mint rewards"), - Error::InvalidValidatorSlotsValue => formatter.write_str("Invalid number of validator slots"), - Error::MintReduceTotalSupply => formatter.write_str("Failed to reduce total supply"), - Error::TransferToDelegatorPurse => formatter.write_str("Transfer to delegators purse error"), - Error::ValidatorRewardTransfer => formatter.write_str("Reward transfer to validator error"), - Error::DelegatorRewardTransfer => formatter.write_str("Rewards transfer to delegator error"), - Error::WithdrawDelegatorReward => formatter.write_str("Withdraw delegator reward error"), - Error::WithdrawValidatorReward => formatter.write_str("Withdraw validator reward error"), - Error::TransferToUnbondingPurse => formatter.write_str("Transfer to unbonding purse error"), - Error::RecordEraInfo => formatter.write_str("Record era info error"), - Error::CLValue => formatter.write_str("CLValue error"), - Error::MissingSeigniorageRecipients => 
formatter.write_str("Missing seigniorage recipients for given era"), - Error::Transfer => formatter.write_str("Transfer error"), - Error::DelegationRateTooLarge => formatter.write_str("Delegation rate too large"), - Error::DelegatorFundsLocked => formatter.write_str("Delegator's funds are locked"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow"), - Error::GasLimit => formatter.write_str("Execution exceeded the gas limit"), - Error::RuntimeStackOverflow => formatter.write_str("Runtime stack overflow"), - Error::MintError => formatter.write_str("An error in the mint contract execution"), - Error::ExceededDelegatorSizeLimit => formatter.write_str("The amount of delegators per validator has been exceeded"), - Error::GlobalDelegatorCapacityReached => formatter.write_str("The global delegator capacity has been reached"), - Error::DelegationAmountTooSmall => formatter.write_str("The delegated amount is below the minimum allowed"), - Error::RuntimeStack => formatter.write_str("Runtime stack error"), - Error::AuctionBidsDisabled => formatter.write_str("Auction bids are disabled"), - Error::GetAccumulationPurse => formatter.write_str("Get accumulation purse error"), - Error::TransferToAdministrator => formatter.write_str("Transfer to administrator error"), - } - } -} - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -#[derive(Debug, PartialEq, Eq)] -pub struct TryFromU8ForError(()); - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for Error { - type Error = TryFromU8ForError; - - fn try_from(value: u8) -> result::Result { - match value { - d if d == Error::MissingKey as u8 => Ok(Error::MissingKey), - d if d == Error::InvalidKeyVariant as u8 => Ok(Error::InvalidKeyVariant), - d if d == Error::MissingValue as u8 => Ok(Error::MissingValue), - d if d == Error::Serialization as u8 => Ok(Error::Serialization), - d if d == Error::TransferToBidPurse as u8 => Ok(Error::TransferToBidPurse), - d if d == Error::InvalidAmount as u8 => Ok(Error::InvalidAmount), - d if d == Error::BidNotFound as u8 => Ok(Error::BidNotFound), - d if d == Error::ValidatorNotFound as u8 => Ok(Error::ValidatorNotFound), - d if d == Error::DelegatorNotFound as u8 => Ok(Error::DelegatorNotFound), - d if d == Error::Storage as u8 => Ok(Error::Storage), - d if d == Error::Bonding as u8 => Ok(Error::Bonding), - d if d == Error::Unbonding as u8 => Ok(Error::Unbonding), - d if d == Error::ReleaseFounderStake as u8 => Ok(Error::ReleaseFounderStake), - d if d == Error::GetBalance as u8 => Ok(Error::GetBalance), - d if d == Error::InvalidContext as u8 => Ok(Error::InvalidContext), - d if d == Error::ValidatorFundsLocked as u8 => Ok(Error::ValidatorFundsLocked), - d if d == Error::InvalidCaller as u8 => Ok(Error::InvalidCaller), - d if d == Error::InvalidPublicKey as u8 => Ok(Error::InvalidPublicKey), - d if d == Error::BondNotFound as u8 => Ok(Error::BondNotFound), - d if d == Error::CreatePurseFailed as u8 => Ok(Error::CreatePurseFailed), - d if d == Error::UnbondTooLarge as u8 => Ok(Error::UnbondTooLarge), - d if d == Error::BondTooSmall as u8 => Ok(Error::BondTooSmall), - d if d == Error::MissingDelegations as u8 => Ok(Error::MissingDelegations), - d if d == Error::MismatchedEraValidators as u8 => Ok(Error::MismatchedEraValidators), - d if d == Error::MintReward as u8 => Ok(Error::MintReward), - d if d == Error::InvalidValidatorSlotsValue as u8 => { - 
Ok(Error::InvalidValidatorSlotsValue) - } - d if d == Error::MintReduceTotalSupply as u8 => Ok(Error::MintReduceTotalSupply), - d if d == Error::TransferToDelegatorPurse as u8 => Ok(Error::TransferToDelegatorPurse), - d if d == Error::ValidatorRewardTransfer as u8 => Ok(Error::ValidatorRewardTransfer), - d if d == Error::DelegatorRewardTransfer as u8 => Ok(Error::DelegatorRewardTransfer), - d if d == Error::WithdrawDelegatorReward as u8 => Ok(Error::WithdrawDelegatorReward), - d if d == Error::WithdrawValidatorReward as u8 => Ok(Error::WithdrawValidatorReward), - d if d == Error::TransferToUnbondingPurse as u8 => Ok(Error::TransferToUnbondingPurse), - - d if d == Error::RecordEraInfo as u8 => Ok(Error::RecordEraInfo), - d if d == Error::CLValue as u8 => Ok(Error::CLValue), - d if d == Error::MissingSeigniorageRecipients as u8 => { - Ok(Error::MissingSeigniorageRecipients) - } - d if d == Error::Transfer as u8 => Ok(Error::Transfer), - d if d == Error::DelegationRateTooLarge as u8 => Ok(Error::DelegationRateTooLarge), - d if d == Error::DelegatorFundsLocked as u8 => Ok(Error::DelegatorFundsLocked), - d if d == Error::ArithmeticOverflow as u8 => Ok(Error::ArithmeticOverflow), - d if d == Error::GasLimit as u8 => Ok(Error::GasLimit), - d if d == Error::RuntimeStackOverflow as u8 => Ok(Error::RuntimeStackOverflow), - d if d == Error::MintError as u8 => Ok(Error::MintError), - d if d == Error::ExceededDelegatorSizeLimit as u8 => { - Ok(Error::ExceededDelegatorSizeLimit) - } - d if d == Error::GlobalDelegatorCapacityReached as u8 => { - Ok(Error::GlobalDelegatorCapacityReached) - } - d if d == Error::DelegationAmountTooSmall as u8 => Ok(Error::DelegationAmountTooSmall), - d if d == Error::RuntimeStack as u8 => Ok(Error::RuntimeStack), - d if d == Error::AuctionBidsDisabled as u8 => Ok(Error::AuctionBidsDisabled), - d if d == Error::GetAccumulationPurse as u8 => Ok(Error::GetAccumulationPurse), - d if d == Error::TransferToAdministrator as u8 => Ok(Error::TransferToAdministrator), - _ => Err(TryFromU8ForError(())), - } - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> result::Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for Error { - fn from_bytes(bytes: &[u8]) -> result::Result<(Self, &[u8]), bytesrepr::Error> { - let (value, rem): (u8, _) = FromBytes::from_bytes(bytes)?; - let error: Error = value - .try_into() - // In case an Error variant is unable to be determined it would return an - // Error::Formatting as if its unable to be correctly deserialized. - .map_err(|_| bytesrepr::Error::Formatting)?; - Ok((error, rem)) - } -} - -impl From for Error { - fn from(_: bytesrepr::Error) -> Self { - Error::Serialization - } -} - -// This error type is not intended to be used by third party crates. 
-#[doc(hidden)] -pub enum PurseLookupError { - KeyNotFound, - KeyUnexpectedType, -} - -impl From for Error { - fn from(error: PurseLookupError) -> Self { - match error { - PurseLookupError::KeyNotFound => Error::MissingKey, - PurseLookupError::KeyUnexpectedType => Error::InvalidKeyVariant, - } - } -} - -#[cfg(test)] -mod tests { - use strum::IntoEnumIterator; - - use super::Error; - - #[test] - fn error_forward_trips() { - for expected_error_variant in Error::iter() { - assert_eq!( - Error::try_from(expected_error_variant as u8), - Ok(expected_error_variant) - ) - } - } - - #[test] - fn error_backward_trips() { - for u8 in 0..=u8::max_value() { - match Error::try_from(u8) { - Ok(error_variant) => { - assert_eq!(u8, error_variant as u8, "Error code mismatch") - } - Err(_) => continue, - }; - } - } -} diff --git a/casper_types/src/system/auction/seigniorage_recipient.rs b/casper_types/src/system/auction/seigniorage_recipient.rs deleted file mode 100644 index 4387ca25..00000000 --- a/casper_types/src/system/auction/seigniorage_recipient.rs +++ /dev/null @@ -1,196 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{Bid, DelegationRate}, - CLType, CLTyped, PublicKey, U512, -}; - -/// The seigniorage recipient details. -#[derive(Default, PartialEq, Eq, Clone, Debug)] -pub struct SeigniorageRecipient { - /// Validator stake (not including delegators) - stake: U512, - /// Delegation rate of a seigniorage recipient. - delegation_rate: DelegationRate, - /// Delegators and their bids. - delegator_stake: BTreeMap, -} - -impl SeigniorageRecipient { - /// Creates a new SeigniorageRecipient - pub fn new( - stake: U512, - delegation_rate: DelegationRate, - delegator_stake: BTreeMap, - ) -> Self { - Self { - stake, - delegation_rate, - delegator_stake, - } - } - - /// Returns stake of the provided recipient - pub fn stake(&self) -> &U512 { - &self.stake - } - - /// Returns delegation rate of the provided recipient - pub fn delegation_rate(&self) -> &DelegationRate { - &self.delegation_rate - } - - /// Returns delegators of the provided recipient and their stake - pub fn delegator_stake(&self) -> &BTreeMap { - &self.delegator_stake - } - - /// Calculates total stake, including delegators' total stake - pub fn total_stake(&self) -> Option { - self.delegator_total_stake()?.checked_add(self.stake) - } - - /// Calculates total stake for all delegators - pub fn delegator_total_stake(&self) -> Option { - let mut total_stake: U512 = U512::zero(); - for stake in self.delegator_stake.values() { - total_stake = total_stake.checked_add(*stake)?; - } - Some(total_stake) - } -} - -impl CLTyped for SeigniorageRecipient { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for SeigniorageRecipient { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.extend(self.stake.to_bytes()?); - result.extend(self.delegation_rate.to_bytes()?); - result.extend(self.delegator_stake.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.stake.serialized_length() - + self.delegation_rate.serialized_length() - + self.delegator_stake.serialized_length() - } -} - -impl FromBytes for SeigniorageRecipient { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (stake, bytes) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, bytes) = FromBytes::from_bytes(bytes)?; - let (delegator_stake, bytes) = FromBytes::from_bytes(bytes)?; - 
Ok(( - SeigniorageRecipient { - stake, - delegation_rate, - delegator_stake, - }, - bytes, - )) - } -} - -impl From<&Bid> for SeigniorageRecipient { - fn from(bid: &Bid) -> Self { - let delegator_stake = bid - .delegators() - .iter() - .map(|(public_key, delegator)| (public_key.clone(), *delegator.staked_amount())) - .collect(); - Self { - stake: *bid.staked_amount(), - delegation_rate: *bid.delegation_rate(), - delegator_stake, - } - } -} - -#[cfg(test)] -mod tests { - use alloc::collections::BTreeMap; - use core::iter::FromIterator; - - use crate::{ - bytesrepr, - system::auction::{DelegationRate, SeigniorageRecipient}, - PublicKey, SecretKey, U512, - }; - - #[test] - fn serialization_roundtrip() { - let delegator_1_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_2_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_3_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let seigniorage_recipient = SeigniorageRecipient { - stake: U512::max_value(), - delegation_rate: DelegationRate::max_value(), - delegator_stake: BTreeMap::from_iter(vec![ - (delegator_1_key, U512::max_value()), - (delegator_2_key, U512::max_value()), - (delegator_3_key, U512::zero()), - ]), - }; - bytesrepr::test_serialization_roundtrip(&seigniorage_recipient); - } - - #[test] - fn test_overflow_in_delegation_rate() { - let delegator_1_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_2_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_3_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let seigniorage_recipient = SeigniorageRecipient { - stake: U512::max_value(), - delegation_rate: DelegationRate::max_value(), - delegator_stake: BTreeMap::from_iter(vec![ - (delegator_1_key, U512::max_value()), - (delegator_2_key, U512::max_value()), - (delegator_3_key, U512::zero()), - ]), - }; - assert_eq!(seigniorage_recipient.total_stake(), None) - } - - #[test] - fn test_overflow_in_delegation_total_stake() { - let delegator_1_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_2_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_3_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let seigniorage_recipient = SeigniorageRecipient { - stake: U512::max_value(), - delegation_rate: DelegationRate::max_value(), - delegator_stake: BTreeMap::from_iter(vec![ - (delegator_1_key, U512::max_value()), - (delegator_2_key, U512::max_value()), - (delegator_3_key, U512::max_value()), - ]), - }; - assert_eq!(seigniorage_recipient.delegator_total_stake(), None) - } -} diff --git a/casper_types/src/system/auction/unbonding_purse.rs b/casper_types/src/system/auction/unbonding_purse.rs deleted file mode 100644 index 1f36d828..00000000 --- a/casper_types/src/system/auction/unbonding_purse.rs +++ /dev/null @@ -1,236 +0,0 @@ -// TODO - remove once schemars stops causing warning. 
-#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, EraId, PublicKey, URef, U512, -}; - -use super::WithdrawPurse; - -/// Unbonding purse. -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct UnbondingPurse { - /// Bonding Purse - bonding_purse: URef, - /// Validators public key. - validator_public_key: PublicKey, - /// Unbonders public key. - unbonder_public_key: PublicKey, - /// Era in which this unbonding request was created. - era_of_creation: EraId, - /// Unbonding Amount. - amount: U512, - /// The validator public key to re-delegate to. - new_validator: Option, -} - -impl UnbondingPurse { - /// Creates [`UnbondingPurse`] instance for an unbonding request. - pub const fn new( - bonding_purse: URef, - validator_public_key: PublicKey, - unbonder_public_key: PublicKey, - era_of_creation: EraId, - amount: U512, - new_validator: Option, - ) -> Self { - Self { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - new_validator, - } - } - - /// Checks if given request is made by a validator by checking if public key of unbonder is same - /// as a key owned by validator. - pub fn is_validator(&self) -> bool { - self.validator_public_key == self.unbonder_public_key - } - - /// Returns bonding purse used to make this unbonding request. - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns public key of validator. - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Returns public key of unbonder. - /// - /// For withdrawal requests that originated from validator's public key through `withdraw_bid` - /// entrypoint this is equal to [`UnbondingPurse::validator_public_key`] and - /// [`UnbondingPurse::is_validator`] is `true`. - pub fn unbonder_public_key(&self) -> &PublicKey { - &self.unbonder_public_key - } - - /// Returns era which was used to create this unbonding request. - pub fn era_of_creation(&self) -> EraId { - self.era_of_creation - } - - /// Returns unbonding amount. - pub fn amount(&self) -> &U512 { - &self.amount - } - - /// Returns the public key for the new validator. 
- pub fn new_validator(&self) -> &Option { - &self.new_validator - } -} - -impl ToBytes for UnbondingPurse { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.extend(&self.bonding_purse.to_bytes()?); - result.extend(&self.validator_public_key.to_bytes()?); - result.extend(&self.unbonder_public_key.to_bytes()?); - result.extend(&self.era_of_creation.to_bytes()?); - result.extend(&self.amount.to_bytes()?); - result.extend(&self.new_validator.to_bytes()?); - Ok(result) - } - fn serialized_length(&self) -> usize { - self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.unbonder_public_key.serialized_length() - + self.era_of_creation.serialized_length() - + self.amount.serialized_length() - + self.new_validator.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.bonding_purse.write_bytes(writer)?; - self.validator_public_key.write_bytes(writer)?; - self.unbonder_public_key.write_bytes(writer)?; - self.era_of_creation.write_bytes(writer)?; - self.amount.write_bytes(writer)?; - self.new_validator.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for UnbondingPurse { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonding_purse, remainder) = FromBytes::from_bytes(bytes)?; - let (validator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (unbonder_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (era_of_creation, remainder) = FromBytes::from_bytes(remainder)?; - let (amount, remainder) = FromBytes::from_bytes(remainder)?; - let (new_validator, remainder) = Option::::from_bytes(remainder)?; - - Ok(( - UnbondingPurse { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - new_validator, - }, - remainder, - )) - } -} - -impl CLTyped for UnbondingPurse { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl From for UnbondingPurse { - fn from(withdraw_purse: WithdrawPurse) -> Self { - UnbondingPurse::new( - withdraw_purse.bonding_purse, - withdraw_purse.validator_public_key, - withdraw_purse.unbonder_public_key, - withdraw_purse.era_of_creation, - withdraw_purse.amount, - None, - ) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - bytesrepr, system::auction::UnbondingPurse, AccessRights, EraId, PublicKey, SecretKey, - URef, U512, - }; - - const BONDING_PURSE: URef = URef::new([14; 32], AccessRights::READ_ADD_WRITE); - const ERA_OF_WITHDRAWAL: EraId = EraId::MAX; - - fn validator_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn unbonder_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn amount() -> U512 { - U512::max_value() - 1 - } - - #[test] - fn serialization_roundtrip_for_unbonding_purse() { - let unbonding_purse = UnbondingPurse { - bonding_purse: BONDING_PURSE, - validator_public_key: validator_public_key(), - unbonder_public_key: unbonder_public_key(), - era_of_creation: ERA_OF_WITHDRAWAL, - amount: amount(), - new_validator: None, - }; - - bytesrepr::test_serialization_roundtrip(&unbonding_purse); - } - - #[test] - fn should_be_validator_condition_for_unbonding_purse() { - let validator_unbonding_purse = UnbondingPurse::new( - BONDING_PURSE, - validator_public_key(), - validator_public_key(), - 
ERA_OF_WITHDRAWAL, - amount(), - None, - ); - assert!(validator_unbonding_purse.is_validator()); - } - - #[test] - fn should_be_delegator_condition_for_unbonding_purse() { - let delegator_unbonding_purse = UnbondingPurse::new( - BONDING_PURSE, - validator_public_key(), - unbonder_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - None, - ); - assert!(!delegator_unbonding_purse.is_validator()); - } -} diff --git a/casper_types/src/system/auction/withdraw_purse.rs b/casper_types/src/system/auction/withdraw_purse.rs deleted file mode 100644 index b79ee1e5..00000000 --- a/casper_types/src/system/auction/withdraw_purse.rs +++ /dev/null @@ -1,195 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, EraId, PublicKey, URef, U512, -}; - -/// A withdraw purse, a legacy structure. -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct WithdrawPurse { - /// Bonding Purse - pub(crate) bonding_purse: URef, - /// Validators public key. - pub(crate) validator_public_key: PublicKey, - /// Unbonders public key. - pub(crate) unbonder_public_key: PublicKey, - /// Era in which this unbonding request was created. - pub(crate) era_of_creation: EraId, - /// Unbonding Amount. - pub(crate) amount: U512, -} - -impl WithdrawPurse { - /// Creates [`WithdrawPurse`] instance for an unbonding request. - pub const fn new( - bonding_purse: URef, - validator_public_key: PublicKey, - unbonder_public_key: PublicKey, - era_of_creation: EraId, - amount: U512, - ) -> Self { - Self { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - } - } - - /// Checks if given request is made by a validator by checking if public key of unbonder is same - /// as a key owned by validator. - pub fn is_validator(&self) -> bool { - self.validator_public_key == self.unbonder_public_key - } - - /// Returns bonding purse used to make this unbonding request. - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns public key of validator. - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Returns public key of unbonder. - /// - /// For withdrawal requests that originated from validator's public key through `withdraw_bid` - /// entrypoint this is equal to [`WithdrawPurse::validator_public_key`] and - /// [`WithdrawPurse::is_validator`] is `true`. - pub fn unbonder_public_key(&self) -> &PublicKey { - &self.unbonder_public_key - } - - /// Returns era which was used to create this unbonding request. - pub fn era_of_creation(&self) -> EraId { - self.era_of_creation - } - - /// Returns unbonding amount. 
- pub fn amount(&self) -> &U512 { - &self.amount - } -} - -impl ToBytes for WithdrawPurse { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.extend(&self.bonding_purse.to_bytes()?); - result.extend(&self.validator_public_key.to_bytes()?); - result.extend(&self.unbonder_public_key.to_bytes()?); - result.extend(&self.era_of_creation.to_bytes()?); - result.extend(&self.amount.to_bytes()?); - - Ok(result) - } - fn serialized_length(&self) -> usize { - self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.unbonder_public_key.serialized_length() - + self.era_of_creation.serialized_length() - + self.amount.serialized_length() - } -} - -impl FromBytes for WithdrawPurse { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonding_purse, remainder) = FromBytes::from_bytes(bytes)?; - let (validator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (unbonder_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (era_of_creation, remainder) = FromBytes::from_bytes(remainder)?; - let (amount, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - WithdrawPurse { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - }, - remainder, - )) - } -} - -impl CLTyped for WithdrawPurse { - fn cl_type() -> CLType { - CLType::Any - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, AccessRights, EraId, PublicKey, SecretKey, URef, U512}; - - use super::WithdrawPurse; - - const BONDING_PURSE: URef = URef::new([41; 32], AccessRights::READ_ADD_WRITE); - const ERA_OF_WITHDRAWAL: EraId = EraId::MAX; - - fn validator_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn unbonder_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([45; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn amount() -> U512 { - U512::max_value() - 1 - } - - #[test] - fn serialization_roundtrip_for_withdraw_purse() { - let withdraw_purse = WithdrawPurse { - bonding_purse: BONDING_PURSE, - validator_public_key: validator_public_key(), - unbonder_public_key: unbonder_public_key(), - era_of_creation: ERA_OF_WITHDRAWAL, - amount: amount(), - }; - - bytesrepr::test_serialization_roundtrip(&withdraw_purse); - } - - #[test] - fn should_be_validator_condition_for_withdraw_purse() { - let validator_withdraw_purse = WithdrawPurse::new( - BONDING_PURSE, - validator_public_key(), - validator_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - ); - assert!(validator_withdraw_purse.is_validator()); - } - - #[test] - fn should_be_delegator_condition_for_withdraw_purse() { - let delegator_withdraw_purse = WithdrawPurse::new( - BONDING_PURSE, - validator_public_key(), - unbonder_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - ); - assert!(!delegator_withdraw_purse.is_validator()); - } -} diff --git a/casper_types/src/system/call_stack_element.rs b/casper_types/src/system/call_stack_element.rs deleted file mode 100644 index e0741f0c..00000000 --- a/casper_types/src/system/call_stack_element.rs +++ /dev/null @@ -1,194 +0,0 @@ -use alloc::vec::Vec; - -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::FromPrimitive; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, ContractHash, ContractPackageHash, -}; - 
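The deleted `CallStackElement` code that follows uses the same tag-byte convention as the `SeigniorageAllocation` code earlier in this diff: a one-byte discriminant is written first, then the variant's fields in declaration order, and an unknown tag is rejected as a formatting error. The standalone sketch below illustrates that convention with a toy `Element` enum and plain byte handling; the names and 32-byte field layout are illustrative assumptions, not casper-types API.

// Hedged sketch of the tag-byte enum layout (illustrative only; not casper-types code).
#[derive(Debug, PartialEq)]
enum Element {
    Session { account: [u8; 32] },
    Contract { package: [u8; 32], contract: [u8; 32] },
}

const SESSION_TAG: u8 = 0;
const CONTRACT_TAG: u8 = 1;

fn encode(element: &Element) -> Vec<u8> {
    let mut out = Vec::new();
    match element {
        Element::Session { account } => {
            out.push(SESSION_TAG); // tag byte first
            out.extend_from_slice(account); // then the variant's fields, in order
        }
        Element::Contract { package, contract } => {
            out.push(CONTRACT_TAG);
            out.extend_from_slice(package);
            out.extend_from_slice(contract);
        }
    }
    out
}

// Reads a fixed 32-byte field and returns the remaining input, like FromBytes does.
fn take32(bytes: &[u8]) -> Option<([u8; 32], &[u8])> {
    if bytes.len() < 32 {
        return None;
    }
    let (head, rem) = bytes.split_at(32);
    Some((head.try_into().ok()?, rem))
}

fn decode(bytes: &[u8]) -> Option<(Element, &[u8])> {
    let (&tag, rem) = bytes.split_first()?;
    match tag {
        SESSION_TAG => {
            let (account, rem) = take32(rem)?;
            Some((Element::Session { account }, rem))
        }
        CONTRACT_TAG => {
            let (package, rem) = take32(rem)?;
            let (contract, rem) = take32(rem)?;
            Some((Element::Contract { package, contract }, rem))
        }
        _ => None, // unknown tag: a formatting error in bytesrepr terms
    }
}

fn main() {
    let original = Element::Session { account: [7; 32] };
    let bytes = encode(&original);
    let (decoded, rest) = decode(&bytes).expect("round trip");
    assert_eq!(decoded, original);
    assert!(rest.is_empty());
}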
-/// Tag representing variants of CallStackElement for purposes of serialization. -#[derive(FromPrimitive, ToPrimitive)] -#[repr(u8)] -pub enum CallStackElementTag { - /// Session tag. - Session = 0, - /// StoredSession tag. - StoredSession, - /// StoredContract tag. - StoredContract, -} - -/// Represents the origin of a sub-call. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum CallStackElement { - /// Session - Session { - /// The account hash of the caller - account_hash: AccountHash, - }, - /// Effectively an EntryPointType::Session - stored access to a session. - StoredSession { - /// The account hash of the caller - account_hash: AccountHash, - /// The contract package hash - contract_package_hash: ContractPackageHash, - /// The contract hash - contract_hash: ContractHash, - }, - /// Contract - StoredContract { - /// The contract package hash - contract_package_hash: ContractPackageHash, - /// The contract hash - contract_hash: ContractHash, - }, -} - -impl CallStackElement { - /// Creates a [`CallStackElement::Session`]. This represents a call into session code, and - /// should only ever happen once in a call stack. - pub fn session(account_hash: AccountHash) -> Self { - CallStackElement::Session { account_hash } - } - - /// Creates a [`'CallStackElement::StoredContract`]. This represents a call into a contract with - /// `EntryPointType::Contract`. - pub fn stored_contract( - contract_package_hash: ContractPackageHash, - contract_hash: ContractHash, - ) -> Self { - CallStackElement::StoredContract { - contract_package_hash, - contract_hash, - } - } - - /// Creates a [`'CallStackElement::StoredSession`]. This represents a call into a contract with - /// `EntryPointType::Session`. - pub fn stored_session( - account_hash: AccountHash, - contract_package_hash: ContractPackageHash, - contract_hash: ContractHash, - ) -> Self { - CallStackElement::StoredSession { - account_hash, - contract_package_hash, - contract_hash, - } - } - - /// Gets the tag from self. - pub fn tag(&self) -> CallStackElementTag { - match self { - CallStackElement::Session { .. } => CallStackElementTag::Session, - CallStackElement::StoredSession { .. } => CallStackElementTag::StoredSession, - CallStackElement::StoredContract { .. } => CallStackElementTag::StoredContract, - } - } - - /// Gets the [`ContractHash`] for both stored session and stored contract variants. - pub fn contract_hash(&self) -> Option<&ContractHash> { - match self { - CallStackElement::Session { .. } => None, - CallStackElement::StoredSession { contract_hash, .. } - | CallStackElement::StoredContract { contract_hash, .. } => Some(contract_hash), - } - } -} - -impl ToBytes for CallStackElement { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.push(self.tag() as u8); - match self { - CallStackElement::Session { account_hash } => { - result.append(&mut account_hash.to_bytes()?) 
- } - CallStackElement::StoredSession { - account_hash, - contract_package_hash, - contract_hash, - } => { - result.append(&mut account_hash.to_bytes()?); - result.append(&mut contract_package_hash.to_bytes()?); - result.append(&mut contract_hash.to_bytes()?); - } - CallStackElement::StoredContract { - contract_package_hash, - contract_hash, - } => { - result.append(&mut contract_package_hash.to_bytes()?); - result.append(&mut contract_hash.to_bytes()?); - } - }; - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - CallStackElement::Session { account_hash } => account_hash.serialized_length(), - CallStackElement::StoredSession { - account_hash, - contract_package_hash, - contract_hash, - } => { - account_hash.serialized_length() - + contract_package_hash.serialized_length() - + contract_hash.serialized_length() - } - CallStackElement::StoredContract { - contract_package_hash, - contract_hash, - } => contract_package_hash.serialized_length() + contract_hash.serialized_length(), - } - } -} - -impl FromBytes for CallStackElement { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - let tag = CallStackElementTag::from_u8(tag).ok_or(bytesrepr::Error::Formatting)?; - match tag { - CallStackElementTag::Session => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((CallStackElement::Session { account_hash }, remainder)) - } - CallStackElementTag::StoredSession => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - let (contract_package_hash, remainder) = - ContractPackageHash::from_bytes(remainder)?; - let (contract_hash, remainder) = ContractHash::from_bytes(remainder)?; - Ok(( - CallStackElement::StoredSession { - account_hash, - contract_package_hash, - contract_hash, - }, - remainder, - )) - } - CallStackElementTag::StoredContract => { - let (contract_package_hash, remainder) = - ContractPackageHash::from_bytes(remainder)?; - let (contract_hash, remainder) = ContractHash::from_bytes(remainder)?; - Ok(( - CallStackElement::StoredContract { - contract_package_hash, - contract_hash, - }, - remainder, - )) - } - } - } -} - -impl CLTyped for CallStackElement { - fn cl_type() -> CLType { - CLType::Any - } -} diff --git a/casper_types/src/system/error.rs b/casper_types/src/system/error.rs deleted file mode 100644 index c63e3f58..00000000 --- a/casper_types/src/system/error.rs +++ /dev/null @@ -1,43 +0,0 @@ -use core::fmt::{self, Display, Formatter}; - -use crate::system::{auction, handle_payment, mint}; - -/// An aggregate enum error with variants for each system contract's error. -#[derive(Debug, Copy, Clone)] -#[non_exhaustive] -pub enum Error { - /// Contains a [`mint::Error`]. - Mint(mint::Error), - /// Contains a [`handle_payment::Error`]. - HandlePayment(handle_payment::Error), - /// Contains a [`auction::Error`]. 
-    Auction(auction::Error),
-}
-
-impl From<mint::Error> for Error {
-    fn from(error: mint::Error) -> Error {
-        Error::Mint(error)
-    }
-}
-
-impl From<handle_payment::Error> for Error {
-    fn from(error: handle_payment::Error) -> Error {
-        Error::HandlePayment(error)
-    }
-}
-
-impl From<auction::Error> for Error {
-    fn from(error: auction::Error) -> Error {
-        Error::Auction(error)
-    }
-}
-
-impl Display for Error {
-    fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
-        match self {
-            Error::Mint(error) => write!(formatter, "Mint error: {}", error),
-            Error::HandlePayment(error) => write!(formatter, "HandlePayment error: {}", error),
-            Error::Auction(error) => write!(formatter, "Auction error: {}", error),
-        }
-    }
-}
diff --git a/casper_types/src/system/handle_payment.rs b/casper_types/src/system/handle_payment.rs
deleted file mode 100644
index 1b12f3ec..00000000
--- a/casper_types/src/system/handle_payment.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-//! Contains implementation of a Handle Payment contract functionality.
-mod constants;
-mod entry_points;
-mod error;
-
-pub use constants::*;
-pub use entry_points::handle_payment_entry_points;
-pub use error::Error;
diff --git a/casper_types/src/system/handle_payment/constants.rs b/casper_types/src/system/handle_payment/constants.rs
deleted file mode 100644
index ef0feedd..00000000
--- a/casper_types/src/system/handle_payment/constants.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-/// Named constant for `purse`.
-pub const ARG_PURSE: &str = "purse";
-/// Named constant for `amount`.
-pub const ARG_AMOUNT: &str = "amount";
-/// Named constant for `source`.
-pub const ARG_ACCOUNT: &str = "account";
-/// Named constant for `target`.
-pub const ARG_TARGET: &str = "target";
-
-/// Named constant for method `get_payment_purse`.
-pub const METHOD_GET_PAYMENT_PURSE: &str = "get_payment_purse";
-/// Named constant for method `set_refund_purse`.
-pub const METHOD_SET_REFUND_PURSE: &str = "set_refund_purse";
-/// Named constant for method `get_refund_purse`.
-pub const METHOD_GET_REFUND_PURSE: &str = "get_refund_purse";
-/// Named constant for method `finalize_payment`.
-pub const METHOD_FINALIZE_PAYMENT: &str = "finalize_payment";
-/// Named constant for method `distribute_accumulated_fees`.
-pub const METHOD_DISTRIBUTE_ACCUMULATED_FEES: &str = "distribute_accumulated_fees";
-
-/// Storage for handle payment contract hash.
-pub const CONTRACT_HASH_KEY: &str = "contract_hash";
-
-/// Storage for handle payment access key.
-pub const CONTRACT_ACCESS_KEY: &str = "access_key";
-
-/// The uref name where the Handle Payment accepts payment for computation on behalf of validators.
-pub const PAYMENT_PURSE_KEY: &str = "payment_purse";
-
-/// The uref name where the Handle Payment will refund unused payment back to the user. The uref
-/// this name corresponds to is set by the user.
-pub const REFUND_PURSE_KEY: &str = "refund_purse";
-/// Storage for handle payment accumulation purse key.
-///
-/// This purse is used when `fee_elimination` config is set to `Accumulate` which makes sense for
-/// some private chains.
-pub const ACCUMULATION_PURSE_KEY: &str = "accumulation_purse"; diff --git a/casper_types/src/system/handle_payment/entry_points.rs b/casper_types/src/system/handle_payment/entry_points.rs deleted file mode 100644 index 9f5c032e..00000000 --- a/casper_types/src/system/handle_payment/entry_points.rs +++ /dev/null @@ -1,66 +0,0 @@ -use alloc::boxed::Box; - -use crate::{ - system::handle_payment::{ - ARG_ACCOUNT, ARG_AMOUNT, ARG_PURSE, METHOD_FINALIZE_PAYMENT, METHOD_GET_PAYMENT_PURSE, - METHOD_GET_REFUND_PURSE, METHOD_SET_REFUND_PURSE, - }, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -use super::METHOD_DISTRIBUTE_ACCUMULATED_FEES; - -/// Creates handle payment contract entry points. -pub fn handle_payment_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let get_payment_purse = EntryPoint::new( - METHOD_GET_PAYMENT_PURSE, - vec![], - CLType::URef, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(get_payment_purse); - - let set_refund_purse = EntryPoint::new( - METHOD_SET_REFUND_PURSE, - vec![Parameter::new(ARG_PURSE, CLType::URef)], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(set_refund_purse); - - let get_refund_purse = EntryPoint::new( - METHOD_GET_REFUND_PURSE, - vec![], - CLType::Option(Box::new(CLType::URef)), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(get_refund_purse); - - let finalize_payment = EntryPoint::new( - METHOD_FINALIZE_PAYMENT, - vec![ - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_ACCOUNT, CLType::ByteArray(32)), - ], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(finalize_payment); - - let distribute_accumulated_fees = EntryPoint::new( - METHOD_DISTRIBUTE_ACCUMULATED_FEES, - vec![], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(distribute_accumulated_fees); - - entry_points -} diff --git a/casper_types/src/system/handle_payment/error.rs b/casper_types/src/system/handle_payment/error.rs deleted file mode 100644 index 77867a36..00000000 --- a/casper_types/src/system/handle_payment/error.rs +++ /dev/null @@ -1,424 +0,0 @@ -//! Home of the Handle Payment contract's [`enum@Error`] type. -use alloc::vec::Vec; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, - result, -}; - -use crate::{ - bytesrepr::{self, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// Errors which can occur while executing the Handle Payment contract. -// TODO: Split this up into user errors vs. system errors. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - // ===== User errors ===== - /// The given validator is not bonded. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(0, Error::NotBonded as u8); - /// ``` - NotBonded = 0, - /// There are too many bonding or unbonding attempts already enqueued to allow more. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(1, Error::TooManyEventsInQueue as u8); - /// ``` - TooManyEventsInQueue = 1, - /// At least one validator must remain bonded. 
- /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(2, Error::CannotUnbondLastValidator as u8); - /// ``` - CannotUnbondLastValidator = 2, - /// Failed to bond or unbond as this would have resulted in exceeding the maximum allowed - /// difference between the largest and smallest stakes. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(3, Error::SpreadTooHigh as u8); - /// ``` - SpreadTooHigh = 3, - /// The given validator already has a bond or unbond attempt enqueued. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(4, Error::MultipleRequests as u8); - /// ``` - MultipleRequests = 4, - /// Attempted to bond with a stake which was too small. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(5, Error::BondTooSmall as u8); - /// ``` - BondTooSmall = 5, - /// Attempted to bond with a stake which was too large. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(6, Error::BondTooLarge as u8); - /// ``` - BondTooLarge = 6, - /// Attempted to unbond an amount which was too large. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(7, Error::UnbondTooLarge as u8); - /// ``` - UnbondTooLarge = 7, - /// While bonding, the transfer from source purse to the Handle Payment internal purse failed. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(8, Error::BondTransferFailed as u8); - /// ``` - BondTransferFailed = 8, - /// While unbonding, the transfer from the Handle Payment internal purse to the destination - /// purse failed. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(9, Error::UnbondTransferFailed as u8); - /// ``` - UnbondTransferFailed = 9, - // ===== System errors ===== - /// Internal error: a [`BlockTime`](crate::BlockTime) was unexpectedly out of sequence. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(10, Error::TimeWentBackwards as u8); - /// ``` - TimeWentBackwards = 10, - /// Internal error: stakes were unexpectedly empty. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(11, Error::StakesNotFound as u8); - /// ``` - StakesNotFound = 11, - /// Internal error: the Handle Payment contract's payment purse wasn't found. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(12, Error::PaymentPurseNotFound as u8); - /// ``` - PaymentPurseNotFound = 12, - /// Internal error: the Handle Payment contract's payment purse key was the wrong type. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(13, Error::PaymentPurseKeyUnexpectedType as u8); - /// ``` - PaymentPurseKeyUnexpectedType = 13, - /// Internal error: couldn't retrieve the balance for the Handle Payment contract's payment - /// purse. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(14, Error::PaymentPurseBalanceNotFound as u8); - /// ``` - PaymentPurseBalanceNotFound = 14, - /// Internal error: the Handle Payment contract's bonding purse wasn't found. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(15, Error::BondingPurseNotFound as u8); - /// ``` - BondingPurseNotFound = 15, - /// Internal error: the Handle Payment contract's bonding purse key was the wrong type. 
- /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(16, Error::BondingPurseKeyUnexpectedType as u8); - /// ``` - BondingPurseKeyUnexpectedType = 16, - /// Internal error: the Handle Payment contract's refund purse key was the wrong type. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(17, Error::RefundPurseKeyUnexpectedType as u8); - /// ``` - RefundPurseKeyUnexpectedType = 17, - /// Internal error: the Handle Payment contract's rewards purse wasn't found. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(18, Error::RewardsPurseNotFound as u8); - /// ``` - RewardsPurseNotFound = 18, - /// Internal error: the Handle Payment contract's rewards purse key was the wrong type. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(19, Error::RewardsPurseKeyUnexpectedType as u8); - /// ``` - RewardsPurseKeyUnexpectedType = 19, - // TODO: Put these in their own enum, and wrap them separately in `BondingError` and - // `UnbondingError`. - /// Internal error: failed to deserialize the stake's key. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(20, Error::StakesKeyDeserializationFailed as u8); - /// ``` - StakesKeyDeserializationFailed = 20, - /// Internal error: failed to deserialize the stake's balance. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(21, Error::StakesDeserializationFailed as u8); - /// ``` - StakesDeserializationFailed = 21, - /// The invoked Handle Payment function can only be called by system contracts, but was called - /// by a user contract. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(22, Error::SystemFunctionCalledByUserAccount as u8); - /// ``` - SystemFunctionCalledByUserAccount = 22, - /// Internal error: while finalizing payment, the amount spent exceeded the amount available. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(23, Error::InsufficientPaymentForAmountSpent as u8); - /// ``` - InsufficientPaymentForAmountSpent = 23, - /// Internal error: while finalizing payment, failed to pay the validators (the transfer from - /// the Handle Payment contract's payment purse to rewards purse failed). - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(24, Error::FailedTransferToRewardsPurse as u8); - /// ``` - FailedTransferToRewardsPurse = 24, - /// Internal error: while finalizing payment, failed to refund the caller's purse (the transfer - /// from the Handle Payment contract's payment purse to refund purse or account's main purse - /// failed). - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(25, Error::FailedTransferToAccountPurse as u8); - /// ``` - FailedTransferToAccountPurse = 25, - /// Handle Payment contract's "set_refund_purse" method can only be called by the payment code - /// of a deploy, but was called by the session code. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(26, Error::SetRefundPurseCalledOutsidePayment as u8); - /// ``` - SetRefundPurseCalledOutsidePayment = 26, - /// Raised when the system is unable to determine purse balance. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(27, Error::GetBalance as u8); - /// ``` - GetBalance = 27, - /// Raised when the system is unable to put named key. 
- /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(28, Error::PutKey as u8); - /// ``` - PutKey = 28, - /// Raised when the system is unable to remove given named key. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(29, Error::RemoveKey as u8); - /// ``` - RemoveKey = 29, - /// Failed to transfer funds. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(30, Error::Transfer as u8); - /// ``` - Transfer = 30, - /// An arithmetic overflow occurred - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(31, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 31, - // NOTE: These variants below will be removed once support for WASM system contracts will be - // dropped. - #[doc(hidden)] - GasLimit = 32, - /// Refund purse is a payment purse. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(33, Error::RefundPurseIsPaymentPurse as u8); - /// ``` - RefundPurseIsPaymentPurse = 33, - /// Error raised while reducing total supply on the mint system contract. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(34, Error::ReduceTotalSupply as u8); - /// ``` - ReduceTotalSupply = 34, - /// Error writing to a storage. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(35, Error::Storage as u8); - /// ``` - Storage = 35, - /// Internal error: the Handle Payment contract's accumulation purse wasn't found. - /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(36, Error::AccumulationPurseNotFound as u8); - /// ``` - AccumulationPurseNotFound = 36, - /// Internal error: the Handle Payment contract's accumulation purse key was the wrong type. 
- /// ``` - /// # use casper_types::system::handle_payment::Error; - /// assert_eq!(37, Error::AccumulationPurseKeyUnexpectedType as u8); - /// ``` - AccumulationPurseKeyUnexpectedType = 37, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::NotBonded => formatter.write_str("Not bonded"), - Error::TooManyEventsInQueue => formatter.write_str("Too many events in queue"), - Error::CannotUnbondLastValidator => formatter.write_str("Cannot unbond last validator"), - Error::SpreadTooHigh => formatter.write_str("Spread is too high"), - Error::MultipleRequests => formatter.write_str("Multiple requests"), - Error::BondTooSmall => formatter.write_str("Bond is too small"), - Error::BondTooLarge => formatter.write_str("Bond is too large"), - Error::UnbondTooLarge => formatter.write_str("Unbond is too large"), - Error::BondTransferFailed => formatter.write_str("Bond transfer failed"), - Error::UnbondTransferFailed => formatter.write_str("Unbond transfer failed"), - Error::TimeWentBackwards => formatter.write_str("Time went backwards"), - Error::StakesNotFound => formatter.write_str("Stakes not found"), - Error::PaymentPurseNotFound => formatter.write_str("Payment purse not found"), - Error::PaymentPurseKeyUnexpectedType => { - formatter.write_str("Payment purse has unexpected type") - } - Error::PaymentPurseBalanceNotFound => { - formatter.write_str("Payment purse balance not found") - } - Error::BondingPurseNotFound => formatter.write_str("Bonding purse not found"), - Error::BondingPurseKeyUnexpectedType => { - formatter.write_str("Bonding purse key has unexpected type") - } - Error::RefundPurseKeyUnexpectedType => { - formatter.write_str("Refund purse key has unexpected type") - } - Error::RewardsPurseNotFound => formatter.write_str("Rewards purse not found"), - Error::RewardsPurseKeyUnexpectedType => { - formatter.write_str("Rewards purse has unexpected type") - } - Error::StakesKeyDeserializationFailed => { - formatter.write_str("Failed to deserialize stake's key") - } - Error::StakesDeserializationFailed => { - formatter.write_str("Failed to deserialize stake's balance") - } - Error::SystemFunctionCalledByUserAccount => { - formatter.write_str("System function was called by user account") - } - Error::InsufficientPaymentForAmountSpent => { - formatter.write_str("Insufficient payment for amount spent") - } - Error::FailedTransferToRewardsPurse => { - formatter.write_str("Transfer to rewards purse has failed") - } - Error::FailedTransferToAccountPurse => { - formatter.write_str("Transfer to account's purse failed") - } - Error::SetRefundPurseCalledOutsidePayment => { - formatter.write_str("Set refund purse was called outside payment") - } - Error::GetBalance => formatter.write_str("Unable to get purse balance"), - Error::PutKey => formatter.write_str("Unable to put named key"), - Error::RemoveKey => formatter.write_str("Unable to remove named key"), - Error::Transfer => formatter.write_str("Failed to transfer funds"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow"), - Error::GasLimit => formatter.write_str("GasLimit"), - Error::RefundPurseIsPaymentPurse => { - formatter.write_str("Refund purse is a payment purse.") - } - Error::ReduceTotalSupply => formatter.write_str("Failed to reduce total supply."), - Error::Storage => formatter.write_str("Failed to write to storage."), - Error::AccumulationPurseNotFound => formatter.write_str("Accumulation purse not found"), - Error::AccumulationPurseKeyUnexpectedType => { - 
formatter.write_str("Accumulation purse has unexpected type") - } - } - } -} - -impl TryFrom for Error { - type Error = (); - - fn try_from(value: u8) -> Result { - let error = match value { - v if v == Error::NotBonded as u8 => Error::NotBonded, - v if v == Error::TooManyEventsInQueue as u8 => Error::TooManyEventsInQueue, - v if v == Error::CannotUnbondLastValidator as u8 => Error::CannotUnbondLastValidator, - v if v == Error::SpreadTooHigh as u8 => Error::SpreadTooHigh, - v if v == Error::MultipleRequests as u8 => Error::MultipleRequests, - v if v == Error::BondTooSmall as u8 => Error::BondTooSmall, - v if v == Error::BondTooLarge as u8 => Error::BondTooLarge, - v if v == Error::UnbondTooLarge as u8 => Error::UnbondTooLarge, - v if v == Error::BondTransferFailed as u8 => Error::BondTransferFailed, - v if v == Error::UnbondTransferFailed as u8 => Error::UnbondTransferFailed, - v if v == Error::TimeWentBackwards as u8 => Error::TimeWentBackwards, - v if v == Error::StakesNotFound as u8 => Error::StakesNotFound, - v if v == Error::PaymentPurseNotFound as u8 => Error::PaymentPurseNotFound, - v if v == Error::PaymentPurseKeyUnexpectedType as u8 => { - Error::PaymentPurseKeyUnexpectedType - } - v if v == Error::PaymentPurseBalanceNotFound as u8 => { - Error::PaymentPurseBalanceNotFound - } - v if v == Error::BondingPurseNotFound as u8 => Error::BondingPurseNotFound, - v if v == Error::BondingPurseKeyUnexpectedType as u8 => { - Error::BondingPurseKeyUnexpectedType - } - v if v == Error::RefundPurseKeyUnexpectedType as u8 => { - Error::RefundPurseKeyUnexpectedType - } - v if v == Error::RewardsPurseNotFound as u8 => Error::RewardsPurseNotFound, - v if v == Error::RewardsPurseKeyUnexpectedType as u8 => { - Error::RewardsPurseKeyUnexpectedType - } - v if v == Error::StakesKeyDeserializationFailed as u8 => { - Error::StakesKeyDeserializationFailed - } - v if v == Error::StakesDeserializationFailed as u8 => { - Error::StakesDeserializationFailed - } - v if v == Error::SystemFunctionCalledByUserAccount as u8 => { - Error::SystemFunctionCalledByUserAccount - } - v if v == Error::InsufficientPaymentForAmountSpent as u8 => { - Error::InsufficientPaymentForAmountSpent - } - v if v == Error::FailedTransferToRewardsPurse as u8 => { - Error::FailedTransferToRewardsPurse - } - v if v == Error::FailedTransferToAccountPurse as u8 => { - Error::FailedTransferToAccountPurse - } - v if v == Error::SetRefundPurseCalledOutsidePayment as u8 => { - Error::SetRefundPurseCalledOutsidePayment - } - - v if v == Error::GetBalance as u8 => Error::GetBalance, - v if v == Error::PutKey as u8 => Error::PutKey, - v if v == Error::RemoveKey as u8 => Error::RemoveKey, - v if v == Error::Transfer as u8 => Error::Transfer, - v if v == Error::ArithmeticOverflow as u8 => Error::ArithmeticOverflow, - v if v == Error::GasLimit as u8 => Error::GasLimit, - v if v == Error::RefundPurseIsPaymentPurse as u8 => Error::RefundPurseIsPaymentPurse, - v if v == Error::ReduceTotalSupply as u8 => Error::ReduceTotalSupply, - v if v == Error::Storage as u8 => Error::Storage, - v if v == Error::AccumulationPurseNotFound as u8 => Error::AccumulationPurseNotFound, - v if v == Error::AccumulationPurseKeyUnexpectedType as u8 => { - Error::AccumulationPurseKeyUnexpectedType - } - _ => return Err(()), - }; - Ok(error) - } -} - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> result::Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn 
serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} diff --git a/casper_types/src/system/mint.rs b/casper_types/src/system/mint.rs deleted file mode 100644 index 4a7e58a1..00000000 --- a/casper_types/src/system/mint.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! Contains implementation of a Mint contract functionality. -mod constants; -mod entry_points; -mod error; - -pub use constants::*; -pub use entry_points::mint_entry_points; -pub use error::Error; diff --git a/casper_types/src/system/mint/constants.rs b/casper_types/src/system/mint/constants.rs deleted file mode 100644 index cffada44..00000000 --- a/casper_types/src/system/mint/constants.rs +++ /dev/null @@ -1,40 +0,0 @@ -/// Named constant for `purse`. -pub const ARG_PURSE: &str = "purse"; -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; -/// Named constant for `id`. -pub const ARG_ID: &str = "id"; -/// Named constant for `to`. -pub const ARG_TO: &str = "to"; -/// Named constant for `source`. -pub const ARG_SOURCE: &str = "source"; -/// Named constant for `target`. -pub const ARG_TARGET: &str = "target"; -/// Named constant for `round_seigniorage_rate` used in installer. -pub const ARG_ROUND_SEIGNIORAGE_RATE: &str = "round_seigniorage_rate"; - -/// Named constant for method `mint`. -pub const METHOD_MINT: &str = "mint"; -/// Named constant for method `reduce_total_supply`. -pub const METHOD_REDUCE_TOTAL_SUPPLY: &str = "reduce_total_supply"; -/// Named constant for (synthetic) method `create` -pub const METHOD_CREATE: &str = "create"; -/// Named constant for method `balance`. -pub const METHOD_BALANCE: &str = "balance"; -/// Named constant for method `transfer`. -pub const METHOD_TRANSFER: &str = "transfer"; -/// Named constant for method `read_base_round_reward`. -pub const METHOD_READ_BASE_ROUND_REWARD: &str = "read_base_round_reward"; -/// Named constant for method `mint_into_existing_purse`. -pub const METHOD_MINT_INTO_EXISTING_PURSE: &str = "mint_into_existing_purse"; - -/// Storage for mint contract hash. -pub const HASH_KEY: &str = "mint_hash"; -/// Storage for mint access key. -pub const ACCESS_KEY: &str = "mint_access"; -/// Storage for base round reward key. -pub const BASE_ROUND_REWARD_KEY: &str = "mint_base_round_reward"; -/// Storage for mint total supply key. -pub const TOTAL_SUPPLY_KEY: &str = "total_supply"; -/// Storage for mint round seigniorage rate. -pub const ROUND_SEIGNIORAGE_RATE_KEY: &str = "round_seigniorage_rate"; diff --git a/casper_types/src/system/mint/entry_points.rs b/casper_types/src/system/mint/entry_points.rs deleted file mode 100644 index bbc82c20..00000000 --- a/casper_types/src/system/mint/entry_points.rs +++ /dev/null @@ -1,102 +0,0 @@ -use alloc::boxed::Box; - -use crate::{ - contracts::Parameters, - system::mint::{ - ARG_AMOUNT, ARG_ID, ARG_PURSE, ARG_SOURCE, ARG_TARGET, ARG_TO, METHOD_BALANCE, - METHOD_CREATE, METHOD_MINT, METHOD_MINT_INTO_EXISTING_PURSE, METHOD_READ_BASE_ROUND_REWARD, - METHOD_REDUCE_TOTAL_SUPPLY, METHOD_TRANSFER, - }, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -/// Returns entry points for a mint system contract. 
-pub fn mint_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_MINT, - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::URef), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_REDUCE_TOTAL_SUPPLY, - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_CREATE, - Parameters::new(), - CLType::URef, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_BALANCE, - vec![Parameter::new(ARG_PURSE, CLType::URef)], - CLType::Option(Box::new(CLType::U512)), - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_TRANSFER, - vec![ - Parameter::new(ARG_TO, CLType::Option(Box::new(CLType::ByteArray(32)))), - Parameter::new(ARG_SOURCE, CLType::URef), - Parameter::new(ARG_TARGET, CLType::URef), - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_ID, CLType::Option(Box::new(CLType::U64))), - ], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_READ_BASE_ROUND_REWARD, - Parameters::new(), - CLType::U512, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_MINT_INTO_EXISTING_PURSE, - vec![ - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_PURSE, CLType::URef), - ], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::Contract, - ); - entry_points.add_entry_point(entry_point); - - entry_points -} diff --git a/casper_types/src/system/mint/error.rs b/casper_types/src/system/mint/error.rs deleted file mode 100644 index db327a40..00000000 --- a/casper_types/src/system/mint/error.rs +++ /dev/null @@ -1,298 +0,0 @@ -//! Home of the Mint contract's [`enum@Error`] type. - -use alloc::vec::Vec; -use core::{ - convert::{TryFrom, TryInto}, - fmt::{self, Display, Formatter}, -}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// Errors which can occur while executing the Mint contract. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Insufficient funds to complete the transfer. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(0, Error::InsufficientFunds as u8); - /// ``` - InsufficientFunds = 0, - /// Source purse not found. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(1, Error::SourceNotFound as u8); - /// ``` - SourceNotFound = 1, - /// Destination purse not found. 
- /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(2, Error::DestNotFound as u8); - /// ``` - DestNotFound = 2, - /// The given [`URef`](crate::URef) does not reference the account holder's purse, or such a - /// `URef` does not have the required [`AccessRights`](crate::AccessRights). - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(3, Error::InvalidURef as u8); - /// ``` - InvalidURef = 3, - /// The source purse is not writeable (see [`URef::is_writeable`](crate::URef::is_writeable)), - /// or the destination purse is not addable (see - /// [`URef::is_addable`](crate::URef::is_addable)). - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(4, Error::InvalidAccessRights as u8); - /// ``` - InvalidAccessRights = 4, - /// Tried to create a new purse with a non-zero initial balance. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(5, Error::InvalidNonEmptyPurseCreation as u8); - /// ``` - InvalidNonEmptyPurseCreation = 5, - /// Failed to read from local or global storage. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(6, Error::Storage as u8); - /// ``` - Storage = 6, - /// Purse not found while trying to get balance. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(7, Error::PurseNotFound as u8); - /// ``` - PurseNotFound = 7, - /// Unable to obtain a key by its name. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(8, Error::MissingKey as u8); - /// ``` - MissingKey = 8, - /// Total supply not found. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(9, Error::TotalSupplyNotFound as u8); - /// ``` - TotalSupplyNotFound = 9, - /// Failed to record transfer. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(10, Error::RecordTransferFailure as u8); - /// ``` - RecordTransferFailure = 10, - /// Invalid attempt to reduce total supply. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(11, Error::InvalidTotalSupplyReductionAttempt as u8); - /// ``` - InvalidTotalSupplyReductionAttempt = 11, - /// Failed to create new uref. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(12, Error::NewURef as u8); - /// ``` - NewURef = 12, - /// Failed to put key. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(13, Error::PutKey as u8); - /// ``` - PutKey = 13, - /// Failed to write to dictionary. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(14, Error::WriteDictionary as u8); - /// ``` - WriteDictionary = 14, - /// Failed to create a [`crate::CLValue`]. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(15, Error::CLValue as u8); - /// ``` - CLValue = 15, - /// Failed to serialize data. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(16, Error::Serialize as u8); - /// ``` - Serialize = 16, - /// Source and target purse [`crate::URef`]s are equal. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(17, Error::EqualSourceAndTarget as u8); - /// ``` - EqualSourceAndTarget = 17, - /// An arithmetic overflow has occurred. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(18, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 18, - - // NOTE: These variants below will be removed once support for WASM system contracts will be - // dropped. 
- #[doc(hidden)] - GasLimit = 19, - - /// Raised when an entry point is called from invalid account context. - InvalidContext = 20, - - /// Session code tried to transfer more CSPR than user approved. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(21, Error::UnapprovedSpendingAmount as u8); - UnapprovedSpendingAmount = 21, - - /// Failed to transfer tokens on a private chain. - /// ``` - /// # use casper_types::system::mint::Error; - /// assert_eq!(22, Error::DisabledUnrestrictedTransfers as u8); - DisabledUnrestrictedTransfers = 22, - - #[cfg(test)] - #[doc(hidden)] - Sentinel, -} - -/// Used for testing; this should be guaranteed to be the maximum valid value of [`Error`] enum. -#[cfg(test)] -const MAX_ERROR_VALUE: u8 = Error::Sentinel as u8; - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -pub struct TryFromU8ForError(()); - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for Error { - type Error = TryFromU8ForError; - - fn try_from(value: u8) -> Result { - match value { - d if d == Error::InsufficientFunds as u8 => Ok(Error::InsufficientFunds), - d if d == Error::SourceNotFound as u8 => Ok(Error::SourceNotFound), - d if d == Error::DestNotFound as u8 => Ok(Error::DestNotFound), - d if d == Error::InvalidURef as u8 => Ok(Error::InvalidURef), - d if d == Error::InvalidAccessRights as u8 => Ok(Error::InvalidAccessRights), - d if d == Error::InvalidNonEmptyPurseCreation as u8 => { - Ok(Error::InvalidNonEmptyPurseCreation) - } - d if d == Error::Storage as u8 => Ok(Error::Storage), - d if d == Error::PurseNotFound as u8 => Ok(Error::PurseNotFound), - d if d == Error::MissingKey as u8 => Ok(Error::MissingKey), - d if d == Error::TotalSupplyNotFound as u8 => Ok(Error::TotalSupplyNotFound), - d if d == Error::RecordTransferFailure as u8 => Ok(Error::RecordTransferFailure), - d if d == Error::InvalidTotalSupplyReductionAttempt as u8 => { - Ok(Error::InvalidTotalSupplyReductionAttempt) - } - d if d == Error::NewURef as u8 => Ok(Error::NewURef), - d if d == Error::PutKey as u8 => Ok(Error::PutKey), - d if d == Error::WriteDictionary as u8 => Ok(Error::WriteDictionary), - d if d == Error::CLValue as u8 => Ok(Error::CLValue), - d if d == Error::Serialize as u8 => Ok(Error::Serialize), - d if d == Error::EqualSourceAndTarget as u8 => Ok(Error::EqualSourceAndTarget), - d if d == Error::ArithmeticOverflow as u8 => Ok(Error::ArithmeticOverflow), - d if d == Error::GasLimit as u8 => Ok(Error::GasLimit), - d if d == Error::InvalidContext as u8 => Ok(Error::InvalidContext), - d if d == Error::UnapprovedSpendingAmount as u8 => Ok(Error::UnapprovedSpendingAmount), - d if d == Error::DisabledUnrestrictedTransfers as u8 => { - Ok(Error::DisabledUnrestrictedTransfers) - } - _ => Err(TryFromU8ForError(())), - } - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for Error { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, rem): (u8, _) = FromBytes::from_bytes(bytes)?; - let error: Error = value - .try_into() - // In case an Error variant is unable to be determined it would return an - // Error::Formatting as if its unable to be correctly deserialized. 
- .map_err(|_| bytesrepr::Error::Formatting)?; - Ok((error, rem)) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::InsufficientFunds => formatter.write_str("Insufficient funds"), - Error::SourceNotFound => formatter.write_str("Source not found"), - Error::DestNotFound => formatter.write_str("Destination not found"), - Error::InvalidURef => formatter.write_str("Invalid URef"), - Error::InvalidAccessRights => formatter.write_str("Invalid AccessRights"), - Error::InvalidNonEmptyPurseCreation => { - formatter.write_str("Invalid non-empty purse creation") - } - Error::Storage => formatter.write_str("Storage error"), - Error::PurseNotFound => formatter.write_str("Purse not found"), - Error::MissingKey => formatter.write_str("Missing key"), - Error::TotalSupplyNotFound => formatter.write_str("Total supply not found"), - Error::RecordTransferFailure => formatter.write_str("Failed to record transfer"), - Error::InvalidTotalSupplyReductionAttempt => { - formatter.write_str("Invalid attempt to reduce total supply") - } - Error::NewURef => formatter.write_str("Failed to create new uref"), - Error::PutKey => formatter.write_str("Failed to put key"), - Error::WriteDictionary => formatter.write_str("Failed to write dictionary"), - Error::CLValue => formatter.write_str("Failed to create a CLValue"), - Error::Serialize => formatter.write_str("Failed to serialize data"), - Error::EqualSourceAndTarget => formatter.write_str("Invalid target purse"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow has occurred"), - Error::GasLimit => formatter.write_str("GasLimit"), - Error::InvalidContext => formatter.write_str("Invalid context"), - Error::UnapprovedSpendingAmount => formatter.write_str("Unapproved spending amount"), - Error::DisabledUnrestrictedTransfers => { - formatter.write_str("Disabled unrestricted transfers") - } - #[cfg(test)] - Error::Sentinel => formatter.write_str("Sentinel error"), - } - } -} - -#[cfg(test)] -mod tests { - use super::{Error, TryFromU8ForError, MAX_ERROR_VALUE}; - - #[test] - fn error_round_trips() { - for i in 0..=u8::max_value() { - match Error::try_from(i) { - Ok(error) if i < MAX_ERROR_VALUE => assert_eq!(error as u8, i), - Ok(error) => panic!( - "value of variant {:?} ({}) exceeds MAX_ERROR_VALUE ({})", - error, i, MAX_ERROR_VALUE - ), - Err(TryFromU8ForError(())) if i >= MAX_ERROR_VALUE => (), - Err(TryFromU8ForError(())) => { - panic!("missing conversion from u8 to error value: {}", i) - } - } - } - } -} diff --git a/casper_types/src/system/standard_payment.rs b/casper_types/src/system/standard_payment.rs deleted file mode 100644 index 92c3fab3..00000000 --- a/casper_types/src/system/standard_payment.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! Contains implementation of a standard payment contract implementation. -mod constants; -mod entry_points; - -pub use constants::*; -pub use entry_points::standard_payment_entry_points; diff --git a/casper_types/src/system/standard_payment/constants.rs b/casper_types/src/system/standard_payment/constants.rs deleted file mode 100644 index 9bd88784..00000000 --- a/casper_types/src/system/standard_payment/constants.rs +++ /dev/null @@ -1,10 +0,0 @@ -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; - -/// Named constant for method `pay`. -pub const METHOD_PAY: &str = "pay"; - -/// Storage for standard payment contract hash. -pub const HASH_KEY: &str = "standard_payment_hash"; -/// Storage for standard payment access key. 
-pub const ACCESS_KEY: &str = "standard_payment_access"; diff --git a/casper_types/src/system/standard_payment/entry_points.rs b/casper_types/src/system/standard_payment/entry_points.rs deleted file mode 100644 index 3eeaed52..00000000 --- a/casper_types/src/system/standard_payment/entry_points.rs +++ /dev/null @@ -1,25 +0,0 @@ -use alloc::{boxed::Box, string::ToString}; - -use crate::{ - system::standard_payment::{ARG_AMOUNT, METHOD_PAY}, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -/// Creates standard payment contract entry points. -pub fn standard_payment_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_PAY.to_string(), - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U32), - }, - EntryPointAccess::Public, - EntryPointType::Session, - ); - entry_points.add_entry_point(entry_point); - - entry_points -} diff --git a/casper_types/src/system/system_contract_type.rs b/casper_types/src/system/system_contract_type.rs deleted file mode 100644 index 7709f6d9..00000000 --- a/casper_types/src/system/system_contract_type.rs +++ /dev/null @@ -1,171 +0,0 @@ -//! Home of system contract type enum. - -use alloc::string::{String, ToString}; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, -}; - -use crate::{ApiError, EntryPoints}; - -use super::{ - auction::auction_entry_points, handle_payment::handle_payment_entry_points, - mint::mint_entry_points, standard_payment::standard_payment_entry_points, -}; - -/// System contract types. -/// -/// Used by converting to a `u32` and passing as the `system_contract_index` argument of -/// `ext_ffi::casper_get_system_contract()`. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum SystemContractType { - /// Mint contract. - Mint, - /// Handle Payment contract. - HandlePayment, - /// Standard Payment contract. - StandardPayment, - /// Auction contract. - Auction, -} - -/// Name of mint system contract -pub const MINT: &str = "mint"; -/// Name of handle payment system contract -pub const HANDLE_PAYMENT: &str = "handle payment"; -/// Name of standard payment system contract -pub const STANDARD_PAYMENT: &str = "standard payment"; -/// Name of auction system contract -pub const AUCTION: &str = "auction"; - -impl SystemContractType { - /// Returns the name of the system contract. - pub fn contract_name(&self) -> String { - match self { - SystemContractType::Mint => MINT.to_string(), - SystemContractType::HandlePayment => HANDLE_PAYMENT.to_string(), - SystemContractType::StandardPayment => STANDARD_PAYMENT.to_string(), - SystemContractType::Auction => AUCTION.to_string(), - } - } - - /// Returns the entrypoint of the system contract. - pub fn contract_entry_points(&self) -> EntryPoints { - match self { - SystemContractType::Mint => mint_entry_points(), - SystemContractType::HandlePayment => handle_payment_entry_points(), - SystemContractType::StandardPayment => standard_payment_entry_points(), - SystemContractType::Auction => auction_entry_points(), - } - } -} - -impl From for u32 { - fn from(system_contract_type: SystemContractType) -> u32 { - match system_contract_type { - SystemContractType::Mint => 0, - SystemContractType::HandlePayment => 1, - SystemContractType::StandardPayment => 2, - SystemContractType::Auction => 3, - } - } -} - -// This conversion is not intended to be used by third party crates. 
-#[doc(hidden)] -impl TryFrom for SystemContractType { - type Error = ApiError; - fn try_from(value: u32) -> Result { - match value { - 0 => Ok(SystemContractType::Mint), - 1 => Ok(SystemContractType::HandlePayment), - 2 => Ok(SystemContractType::StandardPayment), - 3 => Ok(SystemContractType::Auction), - _ => Err(ApiError::InvalidSystemContract), - } - } -} - -impl Display for SystemContractType { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - SystemContractType::Mint => write!(f, "{}", MINT), - SystemContractType::HandlePayment => write!(f, "{}", HANDLE_PAYMENT), - SystemContractType::StandardPayment => write!(f, "{}", STANDARD_PAYMENT), - SystemContractType::Auction => write!(f, "{}", AUCTION), - } - } -} - -#[cfg(test)] -mod tests { - use std::string::ToString; - - use super::*; - - #[test] - fn get_index_of_mint_contract() { - let index: u32 = SystemContractType::Mint.into(); - assert_eq!(index, 0u32); - assert_eq!(SystemContractType::Mint.to_string(), MINT); - } - - #[test] - fn get_index_of_handle_payment_contract() { - let index: u32 = SystemContractType::HandlePayment.into(); - assert_eq!(index, 1u32); - assert_eq!( - SystemContractType::HandlePayment.to_string(), - HANDLE_PAYMENT - ); - } - - #[test] - fn get_index_of_standard_payment_contract() { - let index: u32 = SystemContractType::StandardPayment.into(); - assert_eq!(index, 2u32); - assert_eq!( - SystemContractType::StandardPayment.to_string(), - STANDARD_PAYMENT - ); - } - - #[test] - fn get_index_of_auction_contract() { - let index: u32 = SystemContractType::Auction.into(); - assert_eq!(index, 3u32); - assert_eq!(SystemContractType::Auction.to_string(), AUCTION); - } - - #[test] - fn create_mint_variant_from_int() { - let mint = SystemContractType::try_from(0).ok().unwrap(); - assert_eq!(mint, SystemContractType::Mint); - } - - #[test] - fn create_handle_payment_variant_from_int() { - let handle_payment = SystemContractType::try_from(1).ok().unwrap(); - assert_eq!(handle_payment, SystemContractType::HandlePayment); - } - - #[test] - fn create_standard_payment_variant_from_int() { - let handle_payment = SystemContractType::try_from(2).ok().unwrap(); - assert_eq!(handle_payment, SystemContractType::StandardPayment); - } - - #[test] - fn create_auction_variant_from_int() { - let auction = SystemContractType::try_from(3).ok().unwrap(); - assert_eq!(auction, SystemContractType::Auction); - } - - #[test] - fn create_unknown_system_contract_variant() { - assert!(SystemContractType::try_from(4).is_err()); - assert!(SystemContractType::try_from(5).is_err()); - assert!(SystemContractType::try_from(10).is_err()); - assert!(SystemContractType::try_from(u32::max_value()).is_err()); - } -} diff --git a/casper_types/src/tagged.rs b/casper_types/src/tagged.rs deleted file mode 100644 index deddfe83..00000000 --- a/casper_types/src/tagged.rs +++ /dev/null @@ -1,5 +0,0 @@ -/// The quality of having a tag -pub trait Tagged { - /// Returns the tag of a given object - fn tag(&self) -> T; -} diff --git a/casper_types/src/testing.rs b/casper_types/src/testing.rs deleted file mode 100644 index 9bbb0e2b..00000000 --- a/casper_types/src/testing.rs +++ /dev/null @@ -1,174 +0,0 @@ -//! An RNG for testing purposes. -use std::{ - cell::RefCell, - cmp, env, - fmt::{self, Debug, Display, Formatter}, - thread, -}; - -use rand::{self, CryptoRng, Error, Rng, RngCore, SeedableRng}; -use rand_pcg::Pcg64Mcg; - -thread_local! 
{ - static THIS_THREAD_HAS_RNG: RefCell = RefCell::new(false); -} - -const CL_TEST_SEED: &str = "CL_TEST_SEED"; - -type Seed = ::Seed; // [u8; 16] - -/// A fast, seedable pseudorandom number generator for use in tests which prints the seed if the -/// thread in which it is created panics. -/// -/// Only one `TestRng` is permitted per thread. -pub struct TestRng { - seed: Seed, - rng: Pcg64Mcg, -} - -impl TestRng { - /// Constructs a new `TestRng` using a seed generated from the env var `CL_TEST_SEED` if set or - /// from cryptographically secure random data if not. - /// - /// Note that `new()` or `default()` should only be called once per test. If a test needs to - /// spawn multiple threads each with their own `TestRng`, then use `new()` to create a single, - /// master `TestRng`, then use it to create a seed per child thread. The child `TestRng`s can - /// then be constructed in their own threads via `from_seed()`. - /// - /// # Panics - /// - /// Panics if a `TestRng` has already been created on this thread. - pub fn new() -> Self { - Self::set_flag_or_panic(); - - let mut seed = Seed::default(); - match env::var(CL_TEST_SEED) { - Ok(seed_as_hex) => { - base16::decode_slice(&seed_as_hex, &mut seed).unwrap_or_else(|error| { - THIS_THREAD_HAS_RNG.with(|flag| { - *flag.borrow_mut() = false; - }); - panic!("can't parse '{}' as a TestRng seed: {}", seed_as_hex, error) - }); - } - Err(_) => { - rand::thread_rng().fill(&mut seed); - } - }; - - let rng = Pcg64Mcg::from_seed(seed); - - TestRng { seed, rng } - } - - /// Constructs a new `TestRng` using `seed`. This should be used in cases where a test needs to - /// spawn multiple threads each with their own `TestRng`. A single, master `TestRng` should be - /// constructed before any child threads are spawned, and that one should be used to create - /// seeds for the child threads' `TestRng`s. - /// - /// # Panics - /// - /// Panics if a `TestRng` has already been created on this thread. - pub fn from_seed(seed: Seed) -> Self { - Self::set_flag_or_panic(); - let rng = Pcg64Mcg::from_seed(seed); - TestRng { seed, rng } - } - - fn set_flag_or_panic() { - THIS_THREAD_HAS_RNG.with(|flag| { - if *flag.borrow() { - panic!("cannot create multiple TestRngs on the same thread"); - } - *flag.borrow_mut() = true; - }); - } - - /// Creates a child RNG. - /// - /// The resulting RNG is seeded from `self` deterministically. 
- pub fn create_child(&mut self) -> Self { - let seed = self.gen(); - let rng = Pcg64Mcg::from_seed(seed); - TestRng { seed, rng } - } -} - -impl Default for TestRng { - fn default() -> Self { - TestRng::new() - } -} - -impl Display for TestRng { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "TestRng seed: {}", - base16::encode_lower(&self.seed) - ) - } -} - -impl Debug for TestRng { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - Display::fmt(self, formatter) - } -} - -impl Drop for TestRng { - fn drop(&mut self) { - if thread::panicking() { - let line_1 = format!("Thread: {}", thread::current().name().unwrap_or("unnamed")); - let line_2 = "To reproduce failure, try running with env var:"; - let line_3 = format!("{}={}", CL_TEST_SEED, base16::encode_lower(&self.seed)); - let max_length = cmp::max(line_1.len(), line_2.len()); - let border = "=".repeat(max_length); - println!( - "\n{}\n{}\n{}\n{}\n{}\n", - border, line_1, line_2, line_3, border - ); - } - } -} - -impl SeedableRng for TestRng { - type Seed = ::Seed; - - fn from_seed(seed: Self::Seed) -> Self { - Self::from_seed(seed) - } -} - -impl RngCore for TestRng { - fn next_u32(&mut self) -> u32 { - self.rng.next_u32() - } - - fn next_u64(&mut self) -> u64 { - self.rng.next_u64() - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.rng.fill_bytes(dest) - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { - self.rng.try_fill_bytes(dest) - } -} - -impl CryptoRng for TestRng {} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - #[should_panic(expected = "cannot create multiple TestRngs on the same thread")] - fn second_test_rng_in_thread_should_panic() { - let _test_rng1 = TestRng::new(); - let seed = [1; 16]; - let _test_rng2 = TestRng::from_seed(seed); - } -} diff --git a/casper_types/src/timestamp.rs b/casper_types/src/timestamp.rs deleted file mode 100644 index 563beb69..00000000 --- a/casper_types/src/timestamp.rs +++ /dev/null @@ -1,472 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; -use core::{ - ops::{Add, AddAssign, Div, Mul, Rem, Shl, Shr, Sub, SubAssign}, - time::Duration, -}; -#[cfg(any(feature = "std", test))] -use std::{ - fmt::{self, Display, Formatter}, - str::FromStr, - time::SystemTime, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use humantime::{DurationError, TimestampError}; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -/// A timestamp type, representing a concrete moment in time. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(with = "String", description = "Timestamp formatted as per RFC 3339") -)] -pub struct Timestamp(u64); - -impl Timestamp { - /// The maximum value a timestamp can have. - pub const MAX: Timestamp = Timestamp(u64::MAX); - - #[cfg(any(feature = "std", test))] - /// Returns the timestamp of the current moment. 
- pub fn now() -> Self { - let millis = SystemTime::UNIX_EPOCH.elapsed().unwrap().as_millis() as u64; - Timestamp(millis) - } - - #[cfg(any(feature = "std", test))] - /// Returns the time that has elapsed since this timestamp. - pub fn elapsed(&self) -> TimeDiff { - TimeDiff(Timestamp::now().0.saturating_sub(self.0)) - } - - /// Returns a zero timestamp. - pub fn zero() -> Self { - Timestamp(0) - } - - /// Returns the timestamp as the number of milliseconds since the Unix epoch - pub fn millis(&self) -> u64 { - self.0 - } - - /// Returns the difference between `self` and `other`, or `0` if `self` is earlier than `other`. - pub fn saturating_diff(self, other: Timestamp) -> TimeDiff { - TimeDiff(self.0.saturating_sub(other.0)) - } - - /// Returns the difference between `self` and `other`, or `0` if that would be before the epoch. - #[must_use] - pub fn saturating_sub(self, other: TimeDiff) -> Timestamp { - Timestamp(self.0.saturating_sub(other.0)) - } - - /// Returns the sum of `self` and `other`, or the maximum possible value if that would be - /// exceeded. - #[must_use] - pub fn saturating_add(self, other: TimeDiff) -> Timestamp { - Timestamp(self.0.saturating_add(other.0)) - } - - /// Returns the number of trailing zeros in the number of milliseconds since the epoch. - pub fn trailing_zeros(&self) -> u8 { - self.0.trailing_zeros() as u8 - } -} - -#[cfg(any(feature = "testing", test))] -impl Timestamp { - /// Generates a random instance using a `TestRng`. - pub fn random(rng: &mut TestRng) -> Self { - Timestamp(1_596_763_000_000 + rng.gen_range(200_000..1_000_000)) - } - - /// Checked subtraction for timestamps - pub fn checked_sub(self, other: TimeDiff) -> Option { - self.0.checked_sub(other.0).map(Timestamp) - } -} - -#[cfg(any(feature = "std", test))] -impl Display for Timestamp { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match SystemTime::UNIX_EPOCH.checked_add(Duration::from_millis(self.0)) { - Some(system_time) => write!(f, "{}", humantime::format_rfc3339_millis(system_time)) - .or_else(|e| write!(f, "Invalid timestamp: {}: {}", e, self.0)), - None => write!(f, "invalid Timestamp: {} ms after the Unix epoch", self.0), - } - } -} - -#[cfg(any(feature = "std", test))] -impl FromStr for Timestamp { - type Err = TimestampError; - - fn from_str(value: &str) -> Result { - let system_time = humantime::parse_rfc3339_weak(value)?; - let inner = system_time - .duration_since(SystemTime::UNIX_EPOCH) - .map_err(|_| TimestampError::OutOfRange)? 
- .as_millis() as u64; - Ok(Timestamp(inner)) - } -} - -impl Add<TimeDiff> for Timestamp { - type Output = Timestamp; - - fn add(self, diff: TimeDiff) -> Timestamp { - Timestamp(self.0 + diff.0) - } -} - -impl AddAssign<TimeDiff> for Timestamp { - fn add_assign(&mut self, rhs: TimeDiff) { - self.0 += rhs.0; - } -} - -#[cfg(any(feature = "testing", test))] -impl std::ops::Sub<TimeDiff> for Timestamp { - type Output = Timestamp; - - fn sub(self, diff: TimeDiff) -> Timestamp { - Timestamp(self.0 - diff.0) - } -} - -impl Rem<TimeDiff> for Timestamp { - type Output = TimeDiff; - - fn rem(self, diff: TimeDiff) -> TimeDiff { - TimeDiff(self.0 % diff.0) - } -} - -impl<T> Shl<T> for Timestamp -where - u64: Shl<T, Output = u64>, -{ - type Output = Timestamp; - - fn shl(self, rhs: T) -> Timestamp { - Timestamp(self.0 << rhs) - } -} - -impl<T> Shr<T> for Timestamp -where - u64: Shr<T, Output = u64>, -{ - type Output = Timestamp; - - fn shr(self, rhs: T) -> Timestamp { - Timestamp(self.0 >> rhs) - } -} - -#[cfg(any(feature = "std", test))] -impl Serialize for Timestamp { - fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { - if serializer.is_human_readable() { - self.to_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -#[cfg(any(feature = "std", test))] -impl<'de> Deserialize<'de> for Timestamp { - fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { - if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - Timestamp::from_str(&value_as_string).map_err(SerdeError::custom) - } else { - let inner = u64::deserialize(deserializer)?; - Ok(Timestamp(inner)) - } - } -} - -impl ToBytes for Timestamp { - fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Timestamp { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - u64::from_bytes(bytes).map(|(inner, remainder)| (Timestamp(inner), remainder)) - } -} - -impl From<u64> for Timestamp { - fn from(milliseconds_since_epoch: u64) -> Timestamp { - Timestamp(milliseconds_since_epoch) - } -} - -/// A time difference between two timestamps. -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(with = "String", description = "Human-readable duration.") -)] -pub struct TimeDiff(u64); - -#[cfg(any(feature = "std", test))] -impl Display for TimeDiff { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", humantime::format_duration(Duration::from(*self))) - } -} - -#[cfg(any(feature = "std", test))] -impl FromStr for TimeDiff { - type Err = DurationError; - - fn from_str(value: &str) -> Result<Self, Self::Err> { - let inner = humantime::parse_duration(value)?.as_millis() as u64; - Ok(TimeDiff(inner)) - } -} - -impl TimeDiff { - /// Returns the time difference as the number of milliseconds since the Unix epoch - pub fn millis(&self) -> u64 { - self.0 - } - - /// Creates a new time difference from seconds. - pub const fn from_seconds(seconds: u32) -> Self { - TimeDiff(seconds as u64 * 1_000) - } - - /// Creates a new time difference from milliseconds. - pub const fn from_millis(millis: u64) -> Self { - TimeDiff(millis) - } - - /// Returns the sum, or `TimeDiff(u64::MAX)` if it would overflow. - #[must_use] - pub fn saturating_add(self, rhs: u64) -> Self { - TimeDiff(self.0.saturating_add(rhs)) - } - - /// Returns the product, or `TimeDiff(u64::MAX)` if it would overflow.
- #[must_use] - pub fn saturating_mul(self, rhs: u64) -> Self { - TimeDiff(self.0.saturating_mul(rhs)) - } - - /// Returns the product, or `None` if it would overflow. - #[must_use] - pub fn checked_mul(self, rhs: u64) -> Option<TimeDiff> { - Some(TimeDiff(self.0.checked_mul(rhs)?)) - } -} - -impl Add for TimeDiff { - type Output = TimeDiff; - - fn add(self, rhs: TimeDiff) -> TimeDiff { - TimeDiff(self.0 + rhs.0) - } -} - -impl AddAssign for TimeDiff { - fn add_assign(&mut self, rhs: TimeDiff) { - self.0 += rhs.0; - } -} - -impl Sub for TimeDiff { - type Output = TimeDiff; - - fn sub(self, rhs: TimeDiff) -> TimeDiff { - TimeDiff(self.0 - rhs.0) - } -} - -impl SubAssign for TimeDiff { - fn sub_assign(&mut self, rhs: TimeDiff) { - self.0 -= rhs.0; - } -} - -impl Mul<u64> for TimeDiff { - type Output = TimeDiff; - - fn mul(self, rhs: u64) -> TimeDiff { - TimeDiff(self.0 * rhs) - } -} - -impl Div<u64> for TimeDiff { - type Output = TimeDiff; - - fn div(self, rhs: u64) -> TimeDiff { - TimeDiff(self.0 / rhs) - } -} - -impl Div for TimeDiff { - type Output = u64; - - fn div(self, rhs: TimeDiff) -> u64 { - self.0 / rhs.0 - } -} - -impl From<TimeDiff> for Duration { - fn from(diff: TimeDiff) -> Duration { - Duration::from_millis(diff.0) - } -} - -#[cfg(any(feature = "std", test))] -impl Serialize for TimeDiff { - fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { - if serializer.is_human_readable() { - self.to_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -#[cfg(any(feature = "std", test))] -impl<'de> Deserialize<'de> for TimeDiff { - fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { - if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - TimeDiff::from_str(&value_as_string).map_err(SerdeError::custom) - } else { - let inner = u64::deserialize(deserializer)?; - Ok(TimeDiff(inner)) - } - } -} - -impl ToBytes for TimeDiff { - fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for TimeDiff { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - u64::from_bytes(bytes).map(|(inner, remainder)| (TimeDiff(inner), remainder)) - } -} - -impl From<Duration> for TimeDiff { - fn from(duration: Duration) -> TimeDiff { - TimeDiff(duration.as_millis() as u64) - } -} - -/// A module for the `[serde(with = serde_option_time_diff)]` attribute, to serialize and -/// deserialize `Option<TimeDiff>` treating `None` as 0. -#[cfg(any(feature = "std", test))] -pub mod serde_option_time_diff { - use super::*; - - /// Serializes an `Option<TimeDiff>`, using `0` if the value is `None`. - pub fn serialize<S: Serializer>( - maybe_td: &Option<TimeDiff>, - serializer: S, - ) -> Result<S::Ok, S::Error> { - maybe_td - .unwrap_or_else(|| TimeDiff::from_millis(0)) - .serialize(serializer) - } - - /// Deserializes an `Option<TimeDiff>`, returning `None` if the value is `0`.
- pub fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result<Option<TimeDiff>, D::Error> { - let td = TimeDiff::deserialize(deserializer)?; - if td.0 == 0 { - Ok(None) - } else { - Ok(Some(td)) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn timestamp_serialization_roundtrip() { - let timestamp = Timestamp::now(); - - let timestamp_as_string = timestamp.to_string(); - assert_eq!( - timestamp, - Timestamp::from_str(&timestamp_as_string).unwrap() - ); - - let serialized_json = serde_json::to_string(&timestamp).unwrap(); - assert_eq!(timestamp, serde_json::from_str(&serialized_json).unwrap()); - - let serialized_bincode = bincode::serialize(&timestamp).unwrap(); - assert_eq!( - timestamp, - bincode::deserialize(&serialized_bincode).unwrap() - ); - - bytesrepr::test_serialization_roundtrip(&timestamp); - } - - #[test] - fn timediff_serialization_roundtrip() { - let mut rng = TestRng::new(); - let timediff = TimeDiff(rng.gen()); - - let timediff_as_string = timediff.to_string(); - assert_eq!(timediff, TimeDiff::from_str(&timediff_as_string).unwrap()); - - let serialized_json = serde_json::to_string(&timediff).unwrap(); - assert_eq!(timediff, serde_json::from_str(&serialized_json).unwrap()); - - let serialized_bincode = bincode::serialize(&timediff).unwrap(); - assert_eq!(timediff, bincode::deserialize(&serialized_bincode).unwrap()); - - bytesrepr::test_serialization_roundtrip(&timediff); - } - - #[test] - fn does_not_crash_for_big_timestamp_value() { - assert!(Timestamp::MAX.to_string().starts_with("Invalid timestamp:")); - } -} diff --git a/casper_types/src/transfer.rs b/casper_types/src/transfer.rs deleted file mode 100644 index 23f51df8..00000000 --- a/casper_types/src/transfer.rs +++ /dev/null @@ -1,506 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, CLType, CLTyped, URef, U512, -}; - -/// The length of a deploy hash. -pub const DEPLOY_HASH_LENGTH: usize = 32; -/// The length of a transfer address. -pub const TRANSFER_ADDR_LENGTH: usize = 32; -pub(super) const TRANSFER_ADDR_FORMATTED_STRING_PREFIX: &str = "transfer-"; - -/// A newtype wrapping a [u8; [DEPLOY_HASH_LENGTH]] which is the raw bytes of the -/// deploy hash. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct DeployHash([u8; DEPLOY_HASH_LENGTH]); - -impl DeployHash { - /// Constructs a new `DeployHash` instance from the raw bytes of a deploy hash. - pub const fn new(value: [u8; DEPLOY_HASH_LENGTH]) -> DeployHash { - DeployHash(value) - } - - /// Returns the raw bytes of the deploy hash as an array. - pub fn value(&self) -> [u8; DEPLOY_HASH_LENGTH] { - self.0 - } - - /// Returns the raw bytes of the deploy hash as a `slice`.
- pub fn as_bytes(&self) -> &[u8] { - &self.0 - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for DeployHash { - fn schema_name() -> String { - String::from("DeployHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Hex-encoded deploy hash.".to_string()); - schema_object.into() - } -} - -impl ToBytes for DeployHash { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for DeployHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - <[u8; DEPLOY_HASH_LENGTH]>::from_bytes(bytes) - .map(|(inner, remainder)| (DeployHash(inner), remainder)) - } -} - -impl Serialize for DeployHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - base16::encode_lower(&self.0).serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for DeployHash { - fn deserialize>(deserializer: D) -> Result { - let bytes = if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let vec_bytes = - checksummed_hex::decode(hex_string.as_bytes()).map_err(SerdeError::custom)?; - <[u8; DEPLOY_HASH_LENGTH]>::try_from(vec_bytes.as_ref()).map_err(SerdeError::custom)? - } else { - <[u8; DEPLOY_HASH_LENGTH]>::deserialize(deserializer)? - }; - Ok(DeployHash(bytes)) - } -} - -impl Debug for DeployHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "DeployHash({})", base16::encode_lower(&self.0)) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> DeployHash { - DeployHash::new(rng.gen()) - } -} - -/// Represents a transfer from one purse to another -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Transfer { - /// Deploy that created the transfer - pub deploy_hash: DeployHash, - /// Account from which transfer was executed - pub from: AccountHash, - /// Account to which funds are transferred - pub to: Option, - /// Source purse - pub source: URef, - /// Target purse - pub target: URef, - /// Transfer amount - pub amount: U512, - /// Gas - pub gas: U512, - /// User-defined id - pub id: Option, -} - -impl Transfer { - /// Creates a [`Transfer`]. 
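As a rough sketch of the `DeployHash` serialization removed above: human-readable serializers such as `serde_json` receive the lowercase hex form of the 32 bytes, while binary serializers receive the raw array. The crate-root re-export and the all-zero hash are assumptions made for illustration.

    use casper_types::DeployHash;

    fn main() {
        let hash = DeployHash::new([0u8; 32]);

        // Human-readable: a JSON string of 64 lowercase hex characters.
        let json = serde_json::to_string(&hash).unwrap();
        assert_eq!(json, format!("\"{}\"", "00".repeat(32)));

        // Binary: the raw 32-byte array round-trips through bincode.
        let bytes = bincode::serialize(&hash).unwrap();
        let roundtrip: DeployHash = bincode::deserialize(&bytes).unwrap();
        assert_eq!(hash, roundtrip);
    }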
- #[allow(clippy::too_many_arguments)] - pub fn new( - deploy_hash: DeployHash, - from: AccountHash, - to: Option, - source: URef, - target: URef, - amount: U512, - gas: U512, - id: Option, - ) -> Self { - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - } - } -} - -impl FromBytes for Transfer { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (deploy_hash, rem) = FromBytes::from_bytes(bytes)?; - let (from, rem) = AccountHash::from_bytes(rem)?; - let (to, rem) = >::from_bytes(rem)?; - let (source, rem) = URef::from_bytes(rem)?; - let (target, rem) = URef::from_bytes(rem)?; - let (amount, rem) = U512::from_bytes(rem)?; - let (gas, rem) = U512::from_bytes(rem)?; - let (id, rem) = >::from_bytes(rem)?; - Ok(( - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - }, - rem, - )) - } -} - -impl ToBytes for Transfer { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.deploy_hash.write_bytes(&mut result)?; - self.from.write_bytes(&mut result)?; - self.to.write_bytes(&mut result)?; - self.source.write_bytes(&mut result)?; - self.target.write_bytes(&mut result)?; - self.amount.write_bytes(&mut result)?; - self.gas.write_bytes(&mut result)?; - self.id.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.deploy_hash.serialized_length() - + self.from.serialized_length() - + self.to.serialized_length() - + self.source.serialized_length() - + self.target.serialized_length() - + self.amount.serialized_length() - + self.gas.serialized_length() - + self.id.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deploy_hash.write_bytes(writer)?; - self.from.write_bytes(writer)?; - self.to.write_bytes(writer)?; - self.source.write_bytes(writer)?; - self.target.write_bytes(writer)?; - self.amount.write_bytes(writer)?; - self.gas.write_bytes(writer)?; - self.id.write_bytes(writer)?; - Ok(()) - } -} - -/// Error returned when decoding a `TransferAddr` from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// The prefix is invalid. - InvalidPrefix, - /// The address is not valid hex. - Hex(base16::DecodeError), - /// The slice is the wrong length. - Length(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Length(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'transfer-'"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Length(error) => write!(f, "address portion is wrong length: {}", error), - } - } -} - -/// A newtype wrapping a [u8; [TRANSFER_ADDR_LENGTH]] which is the raw bytes of the -/// transfer address. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct TransferAddr([u8; TRANSFER_ADDR_LENGTH]); - -impl TransferAddr { - /// Constructs a new `TransferAddr` instance from the raw bytes. - pub const fn new(value: [u8; TRANSFER_ADDR_LENGTH]) -> TransferAddr { - TransferAddr(value) - } - - /// Returns the raw bytes of the transfer address as an array. 
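A minimal sketch of constructing the removed `Transfer` record via `Transfer::new`. The hashes, purse `URef`s, amounts and the `u64` transfer id below are placeholders, and the crate-root re-exports are assumed.

    use casper_types::{account::AccountHash, AccessRights, DeployHash, Transfer, URef, U512};

    fn main() {
        let transfer = Transfer::new(
            DeployHash::new([1u8; 32]),        // deploy that executed the transfer
            AccountHash::new([2u8; 32]),       // sender account
            Some(AccountHash::new([3u8; 32])), // recipient account, if known
            URef::new([4u8; 32], AccessRights::READ_ADD_WRITE), // source purse
            URef::new([5u8; 32], AccessRights::ADD),            // target purse
            U512::from(2_500_000_000u64),      // amount
            U512::from(100_000u64),            // gas
            Some(42),                          // user-defined id
        );
        assert_eq!(transfer.amount, U512::from(2_500_000_000u64));
    }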
- pub fn value(&self) -> [u8; TRANSFER_ADDR_LENGTH] { - self.0 - } - - /// Returns the raw bytes of the transfer address as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `TransferAddr` as a prefixed, hex-encoded string. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - TRANSFER_ADDR_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `TransferAddr`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(TRANSFER_ADDR_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = - <[u8; TRANSFER_ADDR_LENGTH]>::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(TransferAddr(bytes)) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for TransferAddr { - fn schema_name() -> String { - String::from("TransferAddr") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Hex-encoded transfer address.".to_string()); - schema_object.into() - } -} - -impl Serialize for TransferAddr { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for TransferAddr { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - TransferAddr::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; TRANSFER_ADDR_LENGTH]>::deserialize(deserializer)?; - Ok(TransferAddr(bytes)) - } - } -} - -impl Display for TransferAddr { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for TransferAddr { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "TransferAddr({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for TransferAddr { - fn cl_type() -> CLType { - CLType::ByteArray(TRANSFER_ADDR_LENGTH as u32) - } -} - -impl ToBytes for TransferAddr { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for TransferAddr { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, remainder) = FromBytes::from_bytes(bytes)?; - Ok((TransferAddr::new(bytes), remainder)) - } -} - -impl AsRef<[u8]> for TransferAddr { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> TransferAddr { - TransferAddr::new(rng.gen()) - } -} - -/// Generators for [`Transfer`] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::{prop::option, Arbitrary, Strategy}; - - use crate::{ - deploy_info::gens::{account_hash_arb, deploy_hash_arb}, - gens::{u512_arb, uref_arb}, - Transfer, - }; - - /// Creates an arbitrary [`Transfer`] - pub fn transfer_arb() -> impl Strategy { - ( - deploy_hash_arb(), - account_hash_arb(), - option::of(account_hash_arb()), - uref_arb(), - 
uref_arb(), - u512_arb(), - u512_arb(), - option::of(::arbitrary()), - ) - .prop_map(|(deploy_hash, from, to, source, target, amount, gas, id)| { - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - } - }) - } -} - -#[cfg(test)] -mod tests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::*; - - proptest! { - #[test] - fn test_serialization_roundtrip(transfer in gens::transfer_arb()) { - bytesrepr::test_serialization_roundtrip(&transfer) - } - } - - #[test] - fn transfer_addr_from_str() { - let transfer_address = TransferAddr([4; 32]); - let encoded = transfer_address.to_formatted_string(); - let decoded = TransferAddr::from_formatted_str(&encoded).unwrap(); - assert_eq!(transfer_address, decoded); - - let invalid_prefix = - "transfe-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "transfer0000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "transfer-00000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(short_addr).is_err()); - - let long_addr = - "transfer-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "transfer-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(TransferAddr::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn transfer_addr_serde_roundtrip() { - let transfer_address = TransferAddr([255; 32]); - let serialized = bincode::serialize(&transfer_address).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(transfer_address, decoded); - } - - #[test] - fn transfer_addr_json_roundtrip() { - let transfer_address = TransferAddr([255; 32]); - let json_string = serde_json::to_string_pretty(&transfer_address).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(transfer_address, decoded); - } -} diff --git a/casper_types/src/transfer_result.rs b/casper_types/src/transfer_result.rs deleted file mode 100644 index ba9ce66b..00000000 --- a/casper_types/src/transfer_result.rs +++ /dev/null @@ -1,39 +0,0 @@ -use core::fmt::Debug; - -use crate::ApiError; - -/// The result of an attempt to transfer between purses. -pub type TransferResult = Result; - -/// The result of a successful transfer between purses. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(i32)] -pub enum TransferredTo { - /// The destination account already existed. - ExistingAccount = 0, - /// The destination account was created. - NewAccount = 1, -} - -impl TransferredTo { - /// Converts an `i32` to a [`TransferResult`], where: - /// * `0` represents `Ok(TransferredTo::ExistingAccount)`, - /// * `1` represents `Ok(TransferredTo::NewAccount)`, - /// * all other inputs are mapped to `Err(ApiError::Transfer)`. - pub fn result_from(value: i32) -> TransferResult { - match value { - x if x == TransferredTo::ExistingAccount as i32 => Ok(TransferredTo::ExistingAccount), - x if x == TransferredTo::NewAccount as i32 => Ok(TransferredTo::NewAccount), - _ => Err(ApiError::Transfer), - } - } - - // This conversion is not intended to be used by third party crates. 
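A small sketch of the `TransferredTo::result_from` mapping removed above, assuming `TransferredTo` and `ApiError` are reachable from the crate root:

    use casper_types::{ApiError, TransferredTo};

    fn main() {
        assert_eq!(TransferredTo::result_from(0), Ok(TransferredTo::ExistingAccount));
        assert_eq!(TransferredTo::result_from(1), Ok(TransferredTo::NewAccount));
        // Any other value signals a failed transfer.
        assert_eq!(TransferredTo::result_from(7), Err(ApiError::Transfer));
    }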
- #[doc(hidden)] - pub fn i32_from(result: TransferResult) -> i32 { - match result { - Ok(transferred_to) => transferred_to as i32, - Err(_) => 2, - } - } -} diff --git a/casper_types/src/uint.rs b/casper_types/src/uint.rs deleted file mode 100644 index bdb30a45..00000000 --- a/casper_types/src/uint.rs +++ /dev/null @@ -1,1001 +0,0 @@ -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - fmt::{self, Formatter}, - iter::Sum, - ops::Add, -}; - -use num_integer::Integer; -use num_traits::{ - AsPrimitive, Bounded, CheckedAdd, CheckedMul, CheckedSub, Num, One, Unsigned, WrappingAdd, - WrappingSub, Zero, -}; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{ - de::{self, Deserialize, Deserializer, MapAccess, SeqAccess, Visitor}, - ser::{Serialize, SerializeStruct, Serializer}, -}; - -use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -#[allow( - clippy::assign_op_pattern, - clippy::ptr_offset_with_cast, - clippy::manual_range_contains, - clippy::range_plus_one, - clippy::transmute_ptr_to_ptr, - clippy::reversed_empty_ranges -)] -mod macro_code { - #[cfg(feature = "datasize")] - use datasize::DataSize; - use uint::construct_uint; - - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U512(8); - } - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U256(4); - } - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U128(2); - } -} - -pub use self::macro_code::{U128, U256, U512}; - -/// Error type for parsing [`U128`], [`U256`], [`U512`] from a string. -#[derive(Debug)] -#[non_exhaustive] -pub enum UIntParseError { - /// Contains the parsing error from the `uint` crate, which only supports base-10 parsing. - FromDecStr(uint::FromDecStrErr), - /// Parsing was attempted on a string representing the number in some base other than 10. - /// - /// Note: a general radix may be supported in the future. - InvalidRadix, -} - -macro_rules! impl_traits_for_uint { - ($type:ident, $total_bytes:expr, $test_mod:ident) => { - impl Serialize for $type { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - return self.to_string().serialize(serializer); - } - - let mut buffer = [0u8; $total_bytes]; - self.to_little_endian(&mut buffer); - let non_zero_bytes: Vec = buffer - .iter() - .rev() - .skip_while(|b| **b == 0) - .cloned() - .collect(); - let num_bytes = non_zero_bytes.len(); - - let mut state = serializer.serialize_struct("bigint", num_bytes + 1)?; - state.serialize_field("", &(num_bytes as u8))?; - - for byte in non_zero_bytes.into_iter().rev() { - state.serialize_field("", &byte)?; - } - state.end() - } - } - - impl<'de> Deserialize<'de> for $type { - fn deserialize>(deserializer: D) -> Result { - struct BigNumVisitor; - - impl<'de> Visitor<'de> for BigNumVisitor { - type Value = $type; - - fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { - formatter.write_str("bignum struct") - } - - fn visit_seq>( - self, - mut sequence: V, - ) -> Result<$type, V::Error> { - let length: u8 = sequence - .next_element()? - .ok_or_else(|| de::Error::invalid_length(0, &self))?; - let mut buffer = [0u8; $total_bytes]; - for index in 0..length as usize { - let value = sequence - .next_element()? 
- .ok_or_else(|| de::Error::invalid_length(index + 1, &self))?; - buffer[index as usize] = value; - } - let result = $type::from_little_endian(&buffer); - Ok(result) - } - - fn visit_map>(self, mut map: V) -> Result<$type, V::Error> { - let _length_key: u8 = map - .next_key()? - .ok_or_else(|| de::Error::missing_field("length"))?; - let length: u8 = map - .next_value() - .map_err(|_| de::Error::invalid_length(0, &self))?; - let mut buffer = [0u8; $total_bytes]; - for index in 0..length { - let _byte_key: u8 = map - .next_key()? - .ok_or_else(|| de::Error::missing_field("byte"))?; - let value = map.next_value().map_err(|_| { - de::Error::invalid_length(index as usize + 1, &self) - })?; - buffer[index as usize] = value; - } - let result = $type::from_little_endian(&buffer); - Ok(result) - } - } - - const FIELDS: &'static [&'static str] = &[ - "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", - "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", - "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", - "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", - "54", "55", "56", "57", "58", "59", "60", "61", "62", "63", "64", - ]; - - if deserializer.is_human_readable() { - let decimal_string = String::deserialize(deserializer)?; - return Self::from_dec_str(&decimal_string) - .map_err(|error| de::Error::custom(format!("{:?}", error))); - } - - deserializer.deserialize_struct("bigint", FIELDS, BigNumVisitor) - } - } - - impl ToBytes for $type { - fn to_bytes(&self) -> Result, Error> { - let mut buf = [0u8; $total_bytes]; - self.to_little_endian(&mut buf); - let mut non_zero_bytes: Vec = - buf.iter().rev().skip_while(|b| **b == 0).cloned().collect(); - let num_bytes = non_zero_bytes.len() as u8; - non_zero_bytes.push(num_bytes); - non_zero_bytes.reverse(); - Ok(non_zero_bytes) - } - - fn serialized_length(&self) -> usize { - let mut buf = [0u8; $total_bytes]; - self.to_little_endian(&mut buf); - let non_zero_bytes = buf.iter().rev().skip_while(|b| **b == 0).count(); - U8_SERIALIZED_LENGTH + non_zero_bytes - } - } - - impl FromBytes for $type { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_bytes, rem): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - - if num_bytes > $total_bytes { - Err(Error::Formatting) - } else { - let (value, rem) = bytesrepr::safe_split_at(rem, num_bytes as usize)?; - let result = $type::from_little_endian(value); - Ok((result, rem)) - } - } - } - - // Trait implementations for unifying U* as numeric types - impl Zero for $type { - fn zero() -> Self { - $type::zero() - } - - fn is_zero(&self) -> bool { - self.is_zero() - } - } - - impl One for $type { - fn one() -> Self { - $type::one() - } - } - - // Requires Zero and One to be implemented - impl Num for $type { - type FromStrRadixErr = UIntParseError; - fn from_str_radix(str: &str, radix: u32) -> Result { - if radix == 10 { - $type::from_dec_str(str).map_err(UIntParseError::FromDecStr) - } else { - // TODO: other radix parsing - Err(UIntParseError::InvalidRadix) - } - } - } - - // Requires Num to be implemented - impl Unsigned for $type {} - - // Additional numeric trait, which also holds for these types - impl Bounded for $type { - fn min_value() -> Self { - $type::zero() - } - - fn max_value() -> Self { - $type::MAX - } - } - - // Instead of implementing arbitrary methods we can use existing traits from num_trait - // crate. 
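A brief sketch of the trimmed, length-prefixed encoding produced by the `ToBytes`/`FromBytes` impls above, shown for `U512` (crate-root re-exports assumed):

    use casper_types::{
        bytesrepr::{FromBytes, ToBytes},
        U512,
    };

    fn main() {
        // 258 = 0x0102 has two significant bytes, so the encoding is a length byte
        // followed by those bytes in little-endian order.
        let encoded = U512::from(258u64).to_bytes().unwrap();
        assert_eq!(encoded, vec![2, 0x02, 0x01]);

        // Zero trims down to just the length byte.
        assert_eq!(U512::zero().to_bytes().unwrap(), vec![0]);

        // Decoding returns the value plus any unconsumed remainder.
        let (value, remainder) = U512::from_bytes(&encoded).unwrap();
        assert_eq!(value, U512::from(258u64));
        assert!(remainder.is_empty());
    }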
- impl WrappingAdd for $type { - fn wrapping_add(&self, other: &$type) -> $type { - self.overflowing_add(*other).0 - } - } - - impl WrappingSub for $type { - fn wrapping_sub(&self, other: &$type) -> $type { - self.overflowing_sub(*other).0 - } - } - - impl CheckedMul for $type { - fn checked_mul(&self, v: &$type) -> Option<$type> { - $type::checked_mul(*self, *v) - } - } - - impl CheckedSub for $type { - fn checked_sub(&self, v: &$type) -> Option<$type> { - $type::checked_sub(*self, *v) - } - } - - impl CheckedAdd for $type { - fn checked_add(&self, v: &$type) -> Option<$type> { - $type::checked_add(*self, *v) - } - } - - impl Integer for $type { - /// Unsigned integer division. Returns the same result as `div` (`/`). - #[inline] - fn div_floor(&self, other: &Self) -> Self { - *self / *other - } - - /// Unsigned integer modulo operation. Returns the same result as `rem` (`%`). - #[inline] - fn mod_floor(&self, other: &Self) -> Self { - *self % *other - } - - /// Calculates the Greatest Common Divisor (GCD) of the number and `other` - #[inline] - fn gcd(&self, other: &Self) -> Self { - let zero = Self::zero(); - // Use Stein's algorithm - let mut m = *self; - let mut n = *other; - if m == zero || n == zero { - return m | n; - } - - // find common factors of 2 - let shift = (m | n).trailing_zeros(); - - // divide n and m by 2 until odd - m >>= m.trailing_zeros(); - n >>= n.trailing_zeros(); - - while m != n { - if m > n { - m -= n; - m >>= m.trailing_zeros(); - } else { - n -= m; - n >>= n.trailing_zeros(); - } - } - m << shift - } - - /// Calculates the Lowest Common Multiple (LCM) of the number and `other`. - #[inline] - fn lcm(&self, other: &Self) -> Self { - self.gcd_lcm(other).1 - } - - /// Calculates the Greatest Common Divisor (GCD) and - /// Lowest Common Multiple (LCM) of the number and `other`. - #[inline] - fn gcd_lcm(&self, other: &Self) -> (Self, Self) { - if self.is_zero() && other.is_zero() { - return (Self::zero(), Self::zero()); - } - let gcd = self.gcd(other); - let lcm = *self * (*other / gcd); - (gcd, lcm) - } - - /// Deprecated, use `is_multiple_of` instead. - #[inline] - fn divides(&self, other: &Self) -> bool { - self.is_multiple_of(other) - } - - /// Returns `true` if the number is a multiple of `other`. - #[inline] - fn is_multiple_of(&self, other: &Self) -> bool { - *self % *other == $type::zero() - } - - /// Returns `true` if the number is divisible by `2`. - #[inline] - fn is_even(&self) -> bool { - (self.0[0]) & 1 == 0 - } - - /// Returns `true` if the number is not divisible by `2`. - #[inline] - fn is_odd(&self) -> bool { - !self.is_even() - } - - /// Simultaneous truncated integer division and modulus. 
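The `num_integer::Integer` impl above gives the big unsigned types the usual `gcd`/`lcm` helpers; the values in this sketch mirror the crate's own tests (crate-root re-export of `U512` assumed).

    use casper_types::U512;
    use num_integer::Integer;

    fn main() {
        assert_eq!(U512::from(56u64).gcd(&U512::from(42u64)), U512::from(14u64));
        assert_eq!(U512::from(8u64).lcm(&U512::from(9u64)), U512::from(72u64));
        // gcd_lcm computes both results in one call.
        assert_eq!(
            U512::from(8u64).gcd_lcm(&U512::from(9u64)),
            (U512::from(1u64), U512::from(72u64))
        );
    }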
- #[inline] - fn div_rem(&self, other: &Self) -> (Self, Self) { - (*self / *other, *self % *other) - } - } - - impl AsPrimitive<$type> for i32 { - fn as_(self) -> $type { - if self >= 0 { - $type::from(self as u32) - } else { - let abs = 0u32.wrapping_sub(self as u32); - $type::zero().wrapping_sub(&$type::from(abs)) - } - } - } - - impl AsPrimitive<$type> for i64 { - fn as_(self) -> $type { - if self >= 0 { - $type::from(self as u64) - } else { - let abs = 0u64.wrapping_sub(self as u64); - $type::zero().wrapping_sub(&$type::from(abs)) - } - } - } - - impl AsPrimitive<$type> for u8 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive<$type> for u32 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive<$type> for u64 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive for $type { - fn as_(self) -> i32 { - self.0[0] as i32 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> i64 { - self.0[0] as i64 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u8 { - self.0[0] as u8 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u32 { - self.0[0] as u32 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u64 { - self.0[0] - } - } - - impl Sum for $type { - fn sum>(iter: I) -> Self { - iter.fold($type::zero(), Add::add) - } - } - - impl Distribution<$type> for Standard { - fn sample(&self, rng: &mut R) -> $type { - let mut raw_bytes = [0u8; $total_bytes]; - rng.fill_bytes(raw_bytes.as_mut()); - $type::from(raw_bytes) - } - } - - #[cfg(feature = "json-schema")] - impl schemars::JsonSchema for $type { - fn schema_name() -> String { - format!("U{}", $total_bytes * 8) - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some(format!( - "Decimal representation of a {}-bit integer.", - $total_bytes * 8 - )); - schema_object.into() - } - } - - #[cfg(test)] - mod $test_mod { - use super::*; - - #[test] - fn test_div_mod_floor() { - assert_eq!($type::from(10).div_floor(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(10).mod_floor(&$type::from(3)), $type::from(1)); - assert_eq!( - $type::from(10).div_mod_floor(&$type::from(3)), - ($type::from(3), $type::from(1)) - ); - assert_eq!($type::from(5).div_floor(&$type::from(5)), $type::from(1)); - assert_eq!($type::from(5).mod_floor(&$type::from(5)), $type::from(0)); - assert_eq!( - $type::from(5).div_mod_floor(&$type::from(5)), - ($type::from(1), $type::from(0)) - ); - assert_eq!($type::from(3).div_floor(&$type::from(7)), $type::from(0)); - assert_eq!($type::from(3).mod_floor(&$type::from(7)), $type::from(3)); - assert_eq!( - $type::from(3).div_mod_floor(&$type::from(7)), - ($type::from(0), $type::from(3)) - ); - } - - #[test] - fn test_gcd() { - assert_eq!($type::from(10).gcd(&$type::from(2)), $type::from(2)); - assert_eq!($type::from(10).gcd(&$type::from(3)), $type::from(1)); - assert_eq!($type::from(0).gcd(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(3).gcd(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(56).gcd(&$type::from(42)), $type::from(14)); - assert_eq!( - $type::MAX.gcd(&($type::MAX / $type::from(2))), - $type::from(1) - ); - assert_eq!($type::from(15).gcd(&$type::from(17)), $type::from(1)); - } - - #[test] - fn test_lcm() { - assert_eq!($type::from(1).lcm(&$type::from(0)), $type::from(0)); - assert_eq!($type::from(0).lcm(&$type::from(1)), 
$type::from(0)); - assert_eq!($type::from(1).lcm(&$type::from(1)), $type::from(1)); - assert_eq!($type::from(8).lcm(&$type::from(9)), $type::from(72)); - assert_eq!($type::from(11).lcm(&$type::from(5)), $type::from(55)); - assert_eq!($type::from(15).lcm(&$type::from(17)), $type::from(255)); - assert_eq!($type::from(4).lcm(&$type::from(8)), $type::from(8)); - } - - #[test] - fn test_is_multiple_of() { - assert!($type::from(6).is_multiple_of(&$type::from(6))); - assert!($type::from(6).is_multiple_of(&$type::from(3))); - assert!($type::from(6).is_multiple_of(&$type::from(1))); - assert!(!$type::from(3).is_multiple_of(&$type::from(5))) - } - - #[test] - fn is_even() { - assert_eq!($type::from(0).is_even(), true); - assert_eq!($type::from(1).is_even(), false); - assert_eq!($type::from(2).is_even(), true); - assert_eq!($type::from(3).is_even(), false); - assert_eq!($type::from(4).is_even(), true); - } - - #[test] - fn is_odd() { - assert_eq!($type::from(0).is_odd(), false); - assert_eq!($type::from(1).is_odd(), true); - assert_eq!($type::from(2).is_odd(), false); - assert_eq!($type::from(3).is_odd(), true); - assert_eq!($type::from(4).is_odd(), false); - } - - #[test] - #[should_panic] - fn overflow_mul_test() { - let _ = $type::MAX * $type::from(2); - } - - #[test] - #[should_panic] - fn overflow_add_test() { - let _ = $type::MAX + $type::from(1); - } - - #[test] - #[should_panic] - fn underflow_sub_test() { - let _ = $type::zero() - $type::from(1); - } - } - }; -} - -impl_traits_for_uint!(U128, 16, u128_test); -impl_traits_for_uint!(U256, 32, u256_test); -impl_traits_for_uint!(U512, 64, u512_test); - -impl AsPrimitive for U128 { - fn as_(self) -> U128 { - self - } -} - -impl AsPrimitive for U128 { - fn as_(self) -> U256 { - let mut result = U256::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U128 { - fn as_(self) -> U512 { - let mut result = U512::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U128 { - let mut result = U128::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U256 { - self - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U512 { - let mut result = U512::zero(); - result.0[..4].clone_from_slice(&self.0[..4]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U128 { - let mut result = U128::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U256 { - let mut result = U256::zero(); - result.0[..4].clone_from_slice(&self.0[..4]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U512 { - self - } -} - -#[cfg(test)] -mod tests { - use std::fmt::Debug; - - use serde::de::DeserializeOwned; - - use super::*; - - fn check_as_i32>(expected: i32, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_i64>(expected: i64, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u8>(expected: u8, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u32>(expected: u32, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u64>(expected: u64, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u128>(expected: U128, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u256>(expected: U256, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u512>(expected: U512, input: T) { - assert_eq!(expected, 
input.as_()); - } - - #[test] - fn as_primitive_from_i32() { - let mut input = 0_i32; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = i32::max_value() - 1; - check_as_i32(input, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - - input = i32::min_value() + 1; - check_as_i32(input, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - // i32::min_value() is -1 - i32::max_value() - check_as_u128( - U128::zero().wrapping_sub(&U128::from(i32::max_value())), - input, - ); - check_as_u256( - U256::zero().wrapping_sub(&U256::from(i32::max_value())), - input, - ); - check_as_u512( - U512::zero().wrapping_sub(&U512::from(i32::max_value())), - input, - ); - } - - #[test] - fn as_primitive_from_i64() { - let mut input = 0_i64; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = i64::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(input, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - - input = i64::min_value() + 1; - check_as_i32(input as i32, input); - check_as_i64(input, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - // i64::min_value() is (-1 - i64::max_value()) - check_as_u128( - U128::zero().wrapping_sub(&U128::from(i64::max_value())), - input, - ); - check_as_u256( - U256::zero().wrapping_sub(&U256::from(i64::max_value())), - input, - ); - check_as_u512( - U512::zero().wrapping_sub(&U512::from(i64::max_value())), - input, - ); - } - - #[test] - fn as_primitive_from_u8() { - let mut input = 0_u8; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u8::max_value() - 1; - check_as_i32(i32::from(input), input); - check_as_i64(i64::from(input), input); - check_as_u8(input, input); - check_as_u32(u32::from(input), input); - check_as_u64(u64::from(input), input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - #[test] - fn as_primitive_from_u32() { - let mut input = 0_u32; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u32::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input, input); - 
check_as_u64(u64::from(input), input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - #[test] - fn as_primitive_from_u64() { - let mut input = 0_u64; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u64::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(input as i64, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - fn make_little_endian_arrays(little_endian_bytes: &[u8]) -> ([u8; 4], [u8; 8]) { - let le_32 = { - let mut le_32 = [0; 4]; - le_32.copy_from_slice(&little_endian_bytes[..4]); - le_32 - }; - - let le_64 = { - let mut le_64 = [0; 8]; - le_64.copy_from_slice(&little_endian_bytes[..8]); - le_64 - }; - - (le_32, le_64) - } - - #[test] - fn as_primitive_from_u128() { - let mut input = U128::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U128::max_value() - 1; - - let mut little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes[..16]); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn as_primitive_from_u256() { - let mut input = U256::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U256::max_value() - 1; - - let mut little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes[..32]); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn as_primitive_from_u512() { - let mut input = U512::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U512::max_value() - 1; - - let mut 
little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn wrapping_test_u512() { - let max = U512::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U512::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U512::max_value()); - } - - #[test] - fn wrapping_test_u256() { - let max = U256::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U256::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U256::max_value()); - } - - #[test] - fn wrapping_test_u128() { - let max = U128::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U128::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U128::max_value()); - } - - fn serde_roundtrip(value: T) { - { - let serialized = bincode::serialize(&value).unwrap(); - let deserialized = bincode::deserialize(serialized.as_slice()).unwrap(); - assert_eq!(value, deserialized); - } - { - let serialized = serde_json::to_string_pretty(&value).unwrap(); - let deserialized = serde_json::from_str(&serialized).unwrap(); - assert_eq!(value, deserialized); - } - } - - #[test] - fn serde_roundtrip_u512() { - serde_roundtrip(U512::min_value()); - serde_roundtrip(U512::from(1)); - serde_roundtrip(U512::from(u64::max_value())); - serde_roundtrip(U512::max_value()); - } - - #[test] - fn serde_roundtrip_u256() { - serde_roundtrip(U256::min_value()); - serde_roundtrip(U256::from(1)); - serde_roundtrip(U256::from(u64::max_value())); - serde_roundtrip(U256::max_value()); - } - - #[test] - fn serde_roundtrip_u128() { - serde_roundtrip(U128::min_value()); - serde_roundtrip(U128::from(1)); - serde_roundtrip(U128::from(u64::max_value())); - serde_roundtrip(U128::max_value()); - } -} diff --git a/casper_types/src/uref.rs b/casper_types/src/uref.rs deleted file mode 100644 index be673e5d..00000000 --- a/casper_types/src/uref.rs +++ /dev/null @@ -1,427 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - num::ParseIntError, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - bytesrepr, - bytesrepr::{Error, FromBytes}, - checksummed_hex, AccessRights, ApiError, Key, ACCESS_RIGHTS_SERIALIZED_LENGTH, -}; - -/// The number of bytes in a [`URef`] address. -pub const UREF_ADDR_LENGTH: usize = 32; - -/// The number of bytes in a serialized [`URef`] where the [`AccessRights`] are not `None`. 
-pub const UREF_SERIALIZED_LENGTH: usize = UREF_ADDR_LENGTH + ACCESS_RIGHTS_SERIALIZED_LENGTH; - -pub(super) const UREF_FORMATTED_STRING_PREFIX: &str = "uref-"; - -/// The address of a `URef` (unforgeable reference) on the network. -pub type URefAddr = [u8; UREF_ADDR_LENGTH]; - -/// Error while parsing a URef from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Prefix is not "uref-". - InvalidPrefix, - /// No access rights as suffix. - MissingSuffix, - /// Access rights are invalid. - InvalidAccessRights, - /// Failed to decode address portion of URef. - Hex(base16::DecodeError), - /// Failed to parse an int. - Int(ParseIntError), - /// The address portion is the wrong length. - Address(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: ParseIntError) -> Self { - FromStrError::Int(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Address(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'uref-'"), - FromStrError::MissingSuffix => write!(f, "no access rights as suffix"), - FromStrError::InvalidAccessRights => write!(f, "invalid access rights"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Int(error) => write!(f, "failed to parse an int: {}", error), - FromStrError::Address(error) => { - write!(f, "address portion is the wrong length: {}", error) - } - } - } -} - -/// Represents an unforgeable reference, containing an address in the network's global storage and -/// the [`AccessRights`] of the reference. -/// -/// A `URef` can be used to index entities such as [`CLValue`](crate::CLValue)s, or smart contracts. -#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct URef(URefAddr, AccessRights); - -impl URef { - /// Constructs a [`URef`] from an address and access rights. - pub const fn new(address: URefAddr, access_rights: AccessRights) -> Self { - URef(address, access_rights) - } - - /// Returns the address of this [`URef`]. - pub fn addr(&self) -> URefAddr { - self.0 - } - - /// Returns the access rights of this [`URef`]. - pub fn access_rights(&self) -> AccessRights { - self.1 - } - - /// Returns a new [`URef`] with the same address and updated access rights. - #[must_use] - pub fn with_access_rights(self, access_rights: AccessRights) -> Self { - URef(self.0, access_rights) - } - - /// Removes the access rights from this [`URef`]. - #[must_use] - pub fn remove_access_rights(self) -> Self { - URef(self.0, AccessRights::NONE) - } - - /// Returns `true` if the access rights are `Some` and - /// [`is_readable`](AccessRights::is_readable) is `true` for them. - #[must_use] - pub fn is_readable(self) -> bool { - self.1.is_readable() - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ`] permission. - #[must_use] - pub fn into_read(self) -> URef { - URef(self.0, AccessRights::READ) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::WRITE`] permission. - #[must_use] - pub fn into_write(self) -> URef { - URef(self.0, AccessRights::WRITE) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::ADD`] permission. 
- #[must_use] - pub fn into_add(self) -> URef { - URef(self.0, AccessRights::ADD) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ_ADD_WRITE`] - /// permission. - #[must_use] - pub fn into_read_add_write(self) -> URef { - URef(self.0, AccessRights::READ_ADD_WRITE) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ_WRITE`] - /// permission. - #[must_use] - pub fn into_read_write(self) -> URef { - URef(self.0, AccessRights::READ_WRITE) - } - - /// Returns `true` if the access rights are `Some` and - /// [`is_writeable`](AccessRights::is_writeable) is `true` for them. - pub fn is_writeable(self) -> bool { - self.1.is_writeable() - } - - /// Returns `true` if the access rights are `Some` and [`is_addable`](AccessRights::is_addable) - /// is `true` for them. - pub fn is_addable(self) -> bool { - self.1.is_addable() - } - - /// Formats the address and access rights of the [`URef`] in a unique way that could be used as - /// a name when storing the given `URef` in a global state. - pub fn to_formatted_string(self) -> String { - // Extract bits as numerical value, with no flags marked as 0. - let access_rights_bits = self.access_rights().bits(); - // Access rights is represented as octal, which means that max value of u8 can - // be represented as maximum of 3 octal digits. - format!( - "{}{}-{:03o}", - UREF_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.addr()), - access_rights_bits - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `URef`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(UREF_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let parts = remainder.splitn(2, '-').collect::>(); - if parts.len() != 2 { - return Err(FromStrError::MissingSuffix); - } - let addr = URefAddr::try_from(checksummed_hex::decode(parts[0])?.as_ref())?; - let access_rights_value = u8::from_str_radix(parts[1], 8)?; - let access_rights = AccessRights::from_bits(access_rights_value) - .ok_or(FromStrError::InvalidAccessRights)?; - Ok(URef(addr, access_rights)) - } - - /// Removes specific access rights from this URef if present. 
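A short sketch of the `URef` formatted-string round trip defined above; the all-zero address is a placeholder, and the `-001` suffix is the three-digit octal encoding of `AccessRights::READ` used in the crate's own tests further below.

    use casper_types::{AccessRights, URef};

    fn main() {
        let uref = URef::new([0u8; 32], AccessRights::READ);

        // "uref-" + 64 hex characters + "-" + access rights as three octal digits.
        let formatted = uref.to_formatted_string();
        assert!(formatted.starts_with("uref-"));
        assert!(formatted.ends_with("-001"));

        // Parsing accepts exactly the format produced above.
        assert_eq!(URef::from_formatted_str(&formatted).unwrap(), uref);
    }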
- pub fn disable_access_rights(&mut self, access_rights: AccessRights) { - self.1.remove(access_rights) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for URef { - fn schema_name() -> String { - String::from("URef") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some(String::from("Hex-encoded, formatted URef.")); - schema_object.into() - } -} - -impl Display for URef { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - let addr = self.addr(); - let access_rights = self.access_rights(); - write!( - f, - "URef({}, {})", - base16::encode_lower(&addr), - access_rights - ) - } -} - -impl Debug for URef { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", self) - } -} - -impl bytesrepr::ToBytes for URef { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - UREF_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), self::Error> { - writer.extend_from_slice(&self.0); - self.1.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for URef { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (id, rem) = FromBytes::from_bytes(bytes)?; - let (access_rights, rem) = FromBytes::from_bytes(rem)?; - Ok((URef(id, access_rights), rem)) - } -} - -impl Serialize for URef { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - (self.0, self.1).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for URef { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - URef::from_formatted_str(&formatted_string).map_err(D::Error::custom) - } else { - let (address, access_rights) = <(URefAddr, AccessRights)>::deserialize(deserializer)?; - Ok(URef(address, access_rights)) - } - } -} - -impl TryFrom for URef { - type Error = ApiError; - - fn try_from(key: Key) -> Result { - if let Key::URef(uref) = key { - Ok(uref) - } else { - Err(ApiError::UnexpectedKeyVariant) - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> URef { - URef::new(rng.gen(), rng.gen()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn uref_as_string() { - // Since we are putting URefs to named_keys map keyed by the label that - // `as_string()` returns, any changes to the string representation of - // that type cannot break the format. 
- let addr_array = [0u8; 32]; - let uref_a = URef::new(addr_array, AccessRights::READ); - assert_eq!( - uref_a.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-001" - ); - let uref_b = URef::new(addr_array, AccessRights::WRITE); - assert_eq!( - uref_b.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-002" - ); - - let uref_c = uref_b.remove_access_rights(); - assert_eq!( - uref_c.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-000" - ); - } - - fn round_trip(uref: URef) { - let string = uref.to_formatted_string(); - let parsed_uref = URef::from_formatted_str(&string).unwrap(); - assert_eq!(uref, parsed_uref); - } - - #[test] - fn uref_from_str() { - round_trip(URef::new([0; 32], AccessRights::NONE)); - round_trip(URef::new([255; 32], AccessRights::READ_ADD_WRITE)); - - let invalid_prefix = - "ref-0000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "uref0000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "uref-00000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(short_addr).is_err()); - - let long_addr = - "uref-000000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "uref-000000000000000000000000000000000000000000000000000000000000000g-000"; - assert!(URef::from_formatted_str(invalid_hex).is_err()); - - let invalid_suffix_separator = - "uref-0000000000000000000000000000000000000000000000000000000000000000:000"; - assert!(URef::from_formatted_str(invalid_suffix_separator).is_err()); - - let invalid_suffix = - "uref-0000000000000000000000000000000000000000000000000000000000000000-abc"; - assert!(URef::from_formatted_str(invalid_suffix).is_err()); - - let invalid_access_rights = - "uref-0000000000000000000000000000000000000000000000000000000000000000-200"; - assert!(URef::from_formatted_str(invalid_access_rights).is_err()); - } - - #[test] - fn serde_roundtrip() { - let uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - let serialized = bincode::serialize(&uref).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(uref, decoded); - } - - #[test] - fn json_roundtrip() { - let uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - let json_string = serde_json::to_string_pretty(&uref).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(uref, decoded); - } - - #[test] - fn should_disable_access_rights() { - let mut uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - assert!(uref.is_writeable()); - uref.disable_access_rights(AccessRights::WRITE); - assert_eq!(uref.access_rights(), AccessRights::READ_ADD); - - uref.disable_access_rights(AccessRights::WRITE); - assert!( - !uref.is_writeable(), - "Disabling access bit twice should be a noop" - ); - - assert_eq!(uref.access_rights(), AccessRights::READ_ADD); - - uref.disable_access_rights(AccessRights::READ_ADD); - assert_eq!(uref.access_rights(), AccessRights::NONE); - - uref.disable_access_rights(AccessRights::READ_ADD); - assert_eq!(uref.access_rights(), AccessRights::NONE); - - uref.disable_access_rights(AccessRights::NONE); - assert_eq!(uref.access_rights(), 
AccessRights::NONE); - } -} diff --git a/casper_types/tests/version_numbers.rs b/casper_types/tests/version_numbers.rs deleted file mode 100644 index 5787cf50..00000000 --- a/casper_types/tests/version_numbers.rs +++ /dev/null @@ -1,5 +0,0 @@ -#[cfg(feature = "version-sync")] -#[test] -fn test_html_root_url() { - version_sync::assert_html_root_url_updated!("src/lib.rs"); -} diff --git a/casper_types_ver_2_0/CHANGELOG.md b/casper_types_ver_2_0/CHANGELOG.md deleted file mode 100644 index a50736b6..00000000 --- a/casper_types_ver_2_0/CHANGELOG.md +++ /dev/null @@ -1,204 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog]. - -[comment]: <> (Added: new features) -[comment]: <> (Changed: changes in existing functionality) -[comment]: <> (Deprecated: soon-to-be removed features) -[comment]: <> (Removed: now removed features) -[comment]: <> (Fixed: any bug fixes) -[comment]: <> (Security: in case of vulnerabilities) - - - -## [Unreleased] (node 2.0) - -### Added -* Add new `EntryPointType::Install`, `EntryPointType::Normal`, `EntryPointAccess::Abstract` variants to support implementation of a factory pattern. - - - -## [Unreleased] (node 1.5.4) - -### Added -* Add a new `SyncHandling` enum, which allows a node to opt out of historical sync. - -### Changed -* Update `k256` to version 0.13.1. - -### Security -* Update `ed25519-dalek` to version 2.0.0 as mitigation for [RUSTSEC-2022-0093](https://rustsec.org/advisories/RUSTSEC-2022-0093) - - - -## 3.0.0 - -### Added -* Add new `bytesrepr::Error::NotRepresentable` error variant that represents values that are not representable by the serialization format. -* Add new `Key::Unbond` key variant under which the new unbonding information (to support redelegation) is written. -* Add new `Key::ChainspecRegistry` key variant under which the `ChainspecRegistry` is written. -* Add new `Key::ChecksumRegistry` key variant under which a registry of checksums for a given block is written. There are two checksums in the registry, one for the execution results and the other for the approvals of all deploys in the block. -* Add new `StoredValue::Unbonding` variant to support redelegating. -* Add a new type `WithdrawPurses` which is meant to represent `UnbondingPurses` as they exist in current live networks. - -### Changed -* Extend `UnbondingPurse` to take a new field `new_validator` which represents the validator to whom tokens will be re-delegated. -* Increase `DICTIONARY_ITEM_KEY_MAX_LENGTH` to 128. -* Change prefix of formatted string representation of `ContractPackageHash` from "contract-package-wasm" to "contract-package-". Parsing from the old format is still supported. -* Apply `#[non_exhaustive]` to error enums. -* Change Debug output of `DeployHash` to hex-encoded string rather than a list of integers. - -### Fixed -* Fix some integer casts, where failure is now detected and reported via new error variant `NotRepresentable`. - - - -## 2.0.0 - -### Fixed -* Republish v1.6.0 as v2.0.0 due to missed breaking change in API (addition of new variant to `Key`). - - - -## 1.6.0 [YANKED] - -### Added -* Extend asymmetric key functionality, available via feature `std` (moved from `casper-nodes` crate). -* Provide `Timestamp` and `TimeDiff` types for time operations, with extended functionality available via feature `std` (moved from `casper-nodes` crate). -* Provide test-only functionality, in particular a seedable RNG `TestRng` which outputs its seed on test failure. 
Available via a new feature `testing`. -* Add new `Key::EraSummary` key variant under which the era summary info is written on each switch block execution. - -### Deprecated -* Deprecate `gens` feature: its functionality is included in the new `testing` feature. - - - -## 1.5.0 - -### Added -* Provide types and functionality to support improved access control inside execution engine. -* Provide `CLTyped` impl for `ContractPackage` to allow it to be passed into contracts. - -### Fixed -* Limit parsing of CLTyped objects to a maximum of 50 types deep. - - - -## 1.4.6 - 2021-12-29 - -### Changed -* Disable checksummed-hex encoding, but leave checksummed-hex decoding in place. - - - -## 1.4.5 - 2021-12-06 - -### Added -* Add function to `auction::MintProvider` trait to support minting into an existing purse. - -### Changed -* Change checksummed hex implementation to use 32 byte rather than 64 byte blake2b digests. - - - -## [1.4.4] - 2021-11-18 - -### Fixed -* Revert the accidental change to the `std` feature causing a broken build when this feature is enabled. - - - -## [1.4.3] - 2021-11-17 [YANKED] - - - -## [1.4.2] - 2021-11-13 [YANKED] - -### Added -* Add checksummed hex encoding following a scheme similar to [EIP-55](https://eips.ethereum.org/EIPS/eip-55). - - - -## [1.4.1] - 2021-10-23 - -No changes. - - - -## [1.4.0] - 2021-10-21 [YANKED] - -### Added -* Add `json-schema` feature, disabled by default, to enable many types to be used to produce JSON-schema data. -* Add implicit `datasize` feature, disabled by default, to enable many types to derive the `DataSize` trait. -* Add `StoredValue` types to this crate. - -### Changed -* Support building and testing using stable Rust. -* Allow longer hex string to be presented in `json` files. Current maximum is increased from 100 to 150 characters. -* Improve documentation and `Debug` impls for `ApiError`. - -### Deprecated -* Feature `std` is deprecated as it is now a no-op, since there is no benefit to linking the std lib via this crate. - - - -## [1.3.0] - 2021-07-19 - -### Changed -* Restrict summarization when JSON pretty-printing to contiguous long hex strings. -* Update pinned version of Rust to `nightly-2021-06-17`. - -### Removed -* Remove ability to clone `SecretKey`s. - - - -## [1.2.0] - 2021-05-27 - -### Changed -* Change to Apache 2.0 license. -* Return a `Result` from the constructor of `SecretKey` rather than potentially panicking. -* Improve `Key` error reporting and tests. - -### Fixed -* Fix `Key` deserialization. - - - -## [1.1.1] - 2021-04-19 - -No changes. - - - -## [1.1.0] - 2021-04-13 [YANKED] - -No changes. - - - -## [1.0.1] - 2021-04-08 - -No changes. - - - -## [1.0.0] - 2021-03-30 - -### Added -* Initial release of types for use by software compatible with Casper mainnet. 
- - - -[Keep a Changelog]: https://keepachangelog.com/en/1.0.0 -[unreleased]: https://github.com/casper-network/casper-node/compare/24fc4027a...dev -[1.4.3]: https://github.com/casper-network/casper-node/compare/2be27b3f5...24fc4027a -[1.4.2]: https://github.com/casper-network/casper-node/compare/v1.4.1...2be27b3f5 -[1.4.1]: https://github.com/casper-network/casper-node/compare/v1.4.0...v1.4.1 -[1.4.0]: https://github.com/casper-network/casper-node/compare/v1.3.0...v1.4.0 -[1.3.0]: https://github.com/casper-network/casper-node/compare/v1.2.0...v1.3.0 -[1.2.0]: https://github.com/casper-network/casper-node/compare/v1.1.1...v1.2.0 -[1.1.1]: https://github.com/casper-network/casper-node/compare/v1.0.1...v1.1.1 -[1.1.0]: https://github.com/casper-network/casper-node/compare/v1.0.1...v1.1.1 -[1.0.1]: https://github.com/casper-network/casper-node/compare/v1.0.0...v1.0.1 -[1.0.0]: https://github.com/casper-network/casper-node/releases/tag/v1.0.0 diff --git a/casper_types_ver_2_0/Cargo.toml b/casper_types_ver_2_0/Cargo.toml deleted file mode 100644 index 6e19e08f..00000000 --- a/casper_types_ver_2_0/Cargo.toml +++ /dev/null @@ -1,89 +0,0 @@ -[package] -name = "casper-types-ver-2_0" -version = "3.0.0" # when updating, also update 'html_root_url' in lib.rs -authors = ["Fraser Hutchison "] -edition = "2018" -description = "Types shared by many casper crates for use on the Casper network." -readme = "README.md" -documentation = "https://docs.rs/casper-types" -homepage = "https://casperlabs.io" -repository = "https://github.com/CasperLabs/casper-node/tree/master/types" -license = "Apache-2.0" - -[dependencies] -base16 = { version = "0.2.1", default-features = false, features = ["alloc"] } -base64 = { version = "0.13.0", default-features = false } -bincode = { version = "1.3.1", optional = true } -bitflags = "1" -blake2 = { version = "0.9.0", default-features = false } -datasize = { workspace = true, optional = true } -derive_more = "0.99.17" -derp = { version = "0.0.14", optional = true } -ed25519-dalek = { version = "2.0.0", default-features = false, features = ["alloc", "zeroize"] } -getrandom = { version = "0.2.0", features = ["rdrand"], optional = true } -hex = { version = "0.4.2", default-features = false, features = ["alloc"] } -hex_fmt = "0.3.0" -humantime = { version = "2", optional = true } -itertools = { version = "0.10.3", default-features = false } -k256 = { version = "0.13.1", default-features = false, features = ["ecdsa", "sha256"] } -libc = { version = "0.2.146", optional = true, default-features = false } -num = { version = "0.4.0", default-features = false, features = ["alloc"] } -num-derive = { version = "0.3.0", default-features = false } -num-integer = { version = "0.1.42", default-features = false } -num-rational = { version = "0.4.0", default-features = false, features = ["serde"] } -num-traits = { version = "0.2.10", default-features = false } -once_cell = { workspace = true, optional = true } -pem = { version = "0.8.1", optional = true } -proptest = { version = "1.0.0", optional = true } -proptest-derive = { version = "0.3.0", optional = true } -rand = { version = "0.8.3", default-features = false, features = ["small_rng"] } -rand_pcg = { version = "0.3.0", optional = true } -schemars = { version = "0.8.16", features = ["preserve_order"], optional = true } -serde = { workspace = true, default-features = false, features = ["alloc", "derive"] } -serde-map-to-array = "1.1.0" -serde_bytes = { version = "0.11.5", default-features = false, features = ["alloc"] } -serde_json = { 
version = "1.0.59", default-features = false, features = ["alloc"] } -strum = { version = "0.24", features = ["derive"], optional = true } -thiserror = { workspace = true, optional = true } -tracing = { workspace = true, default-features = false } -uint = { version = "0.9.0", default-features = false } -untrusted = { version = "0.7.1", optional = true } -version-sync = { version = "0.9", optional = true } - -[dev-dependencies] -base16 = { version = "0.2.1", features = ["std"] } -bincode = "1.3.1" -criterion = "0.3.5" -derp = "0.0.14" -getrandom = "0.2.0" -humantime = "2" -once_cell = { workspace = true } -openssl = "0.10.55" -pem = "0.8.1" -proptest = "1.0.0" -proptest-attr-macro = "1.0.0" -proptest-derive = "0.3.0" -rand = "0.8.3" -rand_pcg = "0.3.0" -serde_json = "1" -serde_test = "1" -strum = { version = "0.24", features = ["derive"] } -tempfile = "3.4.0" -thiserror = { workspace = true } -untrusted = "0.7.1" - -[features] -json-schema = ["once_cell", "schemars", "serde-map-to-array/json-schema"] -std = ["base16/std", "derp", "getrandom/std", "humantime", "itertools/use_std", "libc", "once_cell", "pem", "serde_json/preserve_order", "thiserror", "untrusted"] -testing = ["proptest", "proptest-derive", "rand/default", "rand_pcg", "strum", "bincode"] -# DEPRECATED - use "testing" instead of "gens". -gens = ["testing"] - -[[bench]] -name = "bytesrepr_bench" -harness = false -required-features = ["testing"] - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] diff --git a/casper_types_ver_2_0/README.md b/casper_types_ver_2_0/README.md deleted file mode 100644 index 46f14ea2..00000000 --- a/casper_types_ver_2_0/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# `casper-types` - -[![LOGO](https://raw.githubusercontent.com/casper-network/casper-node/master/images/casper-association-logo-primary.svg)](https://casper.network/) - -[![Build Status](https://drone-auto-casper-network.casperlabs.io/api/badges/casper-network/casper-node/status.svg?branch=dev)](http://drone-auto-casper-network.casperlabs.io/casper-network/casper-node) -[![Crates.io](https://img.shields.io/crates/v/casper-types)](https://crates.io/crates/casper-types) -[![Documentation](https://docs.rs/casper-types/badge.svg)](https://docs.rs/casper-types) -[![License](https://img.shields.io/badge/license-Apache-blue)](https://github.com/CasperLabs/casper-node/blob/master/LICENSE) - -Types shared by many casper crates for use on the Casper network. - -## `no_std` - -The crate is `no_std` (using the `core` and `alloc` crates) unless any of the following features are enabled: - -* `json-schema` to enable many types to be used to produce JSON-schema data via the [`schemars`](https://crates.io/crates/schemars) crate -* `datasize` to enable many types to derive the [`DataSize`](https://github.com/casperlabs/datasize-rs) trait -* `gens` to enable many types to be produced in accordance with [`proptest`](https://crates.io/crates/proptest) usage for consumption within dependee crates' property testing suites - -## License - -Licensed under the [Apache License Version 2.0](https://github.com/casper-network/casper-node/blob/master/LICENSE). 
diff --git a/casper_types_ver_2_0/benches/bytesrepr_bench.rs b/casper_types_ver_2_0/benches/bytesrepr_bench.rs deleted file mode 100644 index 491cecba..00000000 --- a/casper_types_ver_2_0/benches/bytesrepr_bench.rs +++ /dev/null @@ -1,872 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion}; - -use std::{ - collections::{BTreeMap, BTreeSet}, - iter, -}; - -use casper_types_ver_2_0::{ - account::AccountHash, - addressable_entity::{ - ActionThresholds, AddressableEntity, AssociatedKeys, MessageTopics, NamedKeys, - }, - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - package::{PackageKind, PackageStatus}, - system::auction::{Bid, Delegator, EraInfo, SeigniorageAllocation}, - AccessRights, AddressableEntityHash, ByteCodeHash, CLType, CLTyped, CLValue, DeployHash, - DeployInfo, EntityVersionKey, EntityVersions, EntryPoint, EntryPointAccess, EntryPointType, - EntryPoints, Group, Groups, Key, Package, PackageHash, Parameter, ProtocolVersion, PublicKey, - SecretKey, Transfer, TransferAddr, URef, KEY_HASH_LENGTH, TRANSFER_ADDR_LENGTH, U128, U256, - U512, UREF_ADDR_LENGTH, -}; - -static KB: usize = 1024; -static BATCH: usize = 4 * KB; - -const TEST_I32: i32 = 123_456_789; -const TEST_U128: U128 = U128([123_456_789, 0]); -const TEST_U256: U256 = U256([123_456_789, 0, 0, 0]); -const TEST_U512: U512 = U512([123_456_789, 0, 0, 0, 0, 0, 0, 0]); -const TEST_STR_1: &str = "String One"; -const TEST_STR_2: &str = "String Two"; - -fn prepare_vector(size: usize) -> Vec { - (0..size as i32).collect() -} - -fn serialize_vector_of_i32s(b: &mut Bencher) { - let data = prepare_vector(black_box(BATCH)); - b.iter(|| data.to_bytes()); -} - -fn deserialize_vector_of_i32s(b: &mut Bencher) { - let data = prepare_vector(black_box(BATCH)).to_bytes().unwrap(); - b.iter(|| { - let (res, _rem): (Vec, _) = FromBytes::from_bytes(&data).unwrap(); - res - }); -} - -fn serialize_vector_of_u8(b: &mut Bencher) { - // 0, 1, ... 254, 255, 0, 1, ... - let data: Bytes = prepare_vector(BATCH) - .into_iter() - .map(|value| value as u8) - .collect(); - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_vector_of_u8(b: &mut Bencher) { - // 0, 1, ... 254, 255, 0, 1, ... 
- let data: Vec = prepare_vector(BATCH) - .into_iter() - .map(|value| value as u8) - .collect::() - .to_bytes() - .unwrap(); - b.iter(|| Bytes::from_bytes(black_box(&data))) -} - -fn serialize_u8(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&129u8))); -} - -fn deserialize_u8(b: &mut Bencher) { - b.iter(|| u8::from_bytes(black_box(&[129u8]))); -} - -fn serialize_i32(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&1_816_142_132i32))); -} - -fn deserialize_i32(b: &mut Bencher) { - b.iter(|| i32::from_bytes(black_box(&[0x34, 0x21, 0x40, 0x6c]))); -} - -fn serialize_u64(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&14_157_907_845_468_752_670u64))); -} - -fn deserialize_u64(b: &mut Bencher) { - b.iter(|| u64::from_bytes(black_box(&[0x1e, 0x8b, 0xe1, 0x73, 0x2c, 0xfe, 0x7a, 0xc4]))); -} - -fn serialize_some_u64(b: &mut Bencher) { - let data = Some(14_157_907_845_468_752_670u64); - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_some_u64(b: &mut Bencher) { - let data = Some(14_157_907_845_468_752_670u64); - let data = data.to_bytes().unwrap(); - - b.iter(|| Option::::from_bytes(&data)); -} - -fn serialize_none_u64(b: &mut Bencher) { - let data: Option = None; - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_ok_u64(b: &mut Bencher) { - let data: Option = None; - let data = data.to_bytes().unwrap(); - b.iter(|| Option::::from_bytes(&data)); -} - -fn make_test_vec_of_vec8() -> Vec { - (0..4) - .map(|_v| { - // 0, 1, 2, ..., 254, 255 - let inner_vec = iter::repeat_with(|| 0..255u8) - .flatten() - // 4 times to create 4x 1024 bytes - .take(4) - .collect::>(); - Bytes::from(inner_vec) - }) - .collect() -} - -fn serialize_vector_of_vector_of_u8(b: &mut Bencher) { - let data = make_test_vec_of_vec8(); - b.iter(|| data.to_bytes()); -} - -fn deserialize_vector_of_vector_of_u8(b: &mut Bencher) { - let data = make_test_vec_of_vec8().to_bytes().unwrap(); - b.iter(|| Vec::::from_bytes(black_box(&data))); -} - -fn serialize_tree_map(b: &mut Bencher) { - let data = { - let mut res = BTreeMap::new(); - res.insert("asdf".to_string(), "zxcv".to_string()); - res.insert("qwer".to_string(), "rewq".to_string()); - res.insert("1234".to_string(), "5678".to_string()); - res - }; - - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_treemap(b: &mut Bencher) { - let data = { - let mut res = BTreeMap::new(); - res.insert("asdf".to_string(), "zxcv".to_string()); - res.insert("qwer".to_string(), "rewq".to_string()); - res.insert("1234".to_string(), "5678".to_string()); - res - }; - let data = data.to_bytes().unwrap(); - b.iter(|| BTreeMap::::from_bytes(black_box(&data))); -} - -fn serialize_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; - let data = lorem.to_string(); - b.iter(|| ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; - let data = lorem.to_bytes().unwrap(); - b.iter(|| String::from_bytes(&data)); -} - -fn serialize_vec_of_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.".to_string(); - let array_of_lorem: Vec = lorem.split(' ').map(Into::into).collect(); - let data = array_of_lorem; - b.iter(|| 
ToBytes::to_bytes(black_box(&data))); -} - -fn deserialize_vec_of_string(b: &mut Bencher) { - let lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.".to_string(); - let array_of_lorem: Vec = lorem.split(' ').map(Into::into).collect(); - let data = array_of_lorem.to_bytes().unwrap(); - - b.iter(|| Vec::::from_bytes(&data)); -} - -fn serialize_unit(b: &mut Bencher) { - b.iter(|| ToBytes::to_bytes(black_box(&()))) -} - -fn deserialize_unit(b: &mut Bencher) { - let data = ().to_bytes().unwrap(); - - b.iter(|| <()>::from_bytes(&data)) -} - -fn serialize_key_account(b: &mut Bencher) { - let account = Key::Account(AccountHash::new([0u8; 32])); - - b.iter(|| ToBytes::to_bytes(black_box(&account))) -} - -fn deserialize_key_account(b: &mut Bencher) { - let account = Key::Account(AccountHash::new([0u8; 32])); - let account_bytes = account.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&account_bytes))) -} - -fn serialize_key_hash(b: &mut Bencher) { - let hash = Key::Hash([0u8; 32]); - b.iter(|| ToBytes::to_bytes(black_box(&hash))) -} - -fn deserialize_key_hash(b: &mut Bencher) { - let hash = Key::Hash([0u8; 32]); - let hash_bytes = hash.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&hash_bytes))) -} - -fn serialize_key_uref(b: &mut Bencher) { - let uref = Key::URef(URef::new([0u8; 32], AccessRights::ADD_WRITE)); - b.iter(|| ToBytes::to_bytes(black_box(&uref))) -} - -fn deserialize_key_uref(b: &mut Bencher) { - let uref = Key::URef(URef::new([0u8; 32], AccessRights::ADD_WRITE)); - let uref_bytes = uref.to_bytes().unwrap(); - - b.iter(|| Key::from_bytes(black_box(&uref_bytes))) -} - -fn serialize_vec_of_keys(b: &mut Bencher) { - let keys: Vec = (0..32) - .map(|i| Key::URef(URef::new([i; 32], AccessRights::ADD_WRITE))) - .collect(); - b.iter(|| ToBytes::to_bytes(black_box(&keys))) -} - -fn deserialize_vec_of_keys(b: &mut Bencher) { - let keys: Vec = (0..32) - .map(|i| Key::URef(URef::new([i; 32], AccessRights::ADD_WRITE))) - .collect(); - let keys_bytes = keys.to_bytes().unwrap(); - b.iter(|| Vec::::from_bytes(black_box(&keys_bytes))); -} - -fn serialize_access_rights_read(b: &mut Bencher) { - b.iter(|| AccessRights::READ.to_bytes()); -} - -fn deserialize_access_rights_read(b: &mut Bencher) { - let data = AccessRights::READ.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_write(b: &mut Bencher) { - b.iter(|| AccessRights::WRITE.to_bytes()); -} - -fn deserialize_access_rights_write(b: &mut Bencher) { - let data = AccessRights::WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_add(b: &mut Bencher) { - b.iter(|| AccessRights::ADD.to_bytes()); -} - -fn deserialize_access_rights_add(b: &mut Bencher) { - let data = AccessRights::ADD.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_read_add(b: &mut Bencher) { - b.iter(|| AccessRights::READ_ADD.to_bytes()); -} - -fn deserialize_access_rights_read_add(b: &mut Bencher) { - let data = AccessRights::READ_ADD.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_read_write(b: &mut Bencher) { - b.iter(|| AccessRights::READ_WRITE.to_bytes()); -} - -fn deserialize_access_rights_read_write(b: &mut Bencher) { - let data = AccessRights::READ_WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_access_rights_add_write(b: &mut 
Bencher) { - b.iter(|| AccessRights::ADD_WRITE.to_bytes()); -} - -fn deserialize_access_rights_add_write(b: &mut Bencher) { - let data = AccessRights::ADD_WRITE.to_bytes().unwrap(); - b.iter(|| AccessRights::from_bytes(&data)); -} - -fn serialize_cl_value(raw_value: T) -> Vec { - CLValue::from_t(raw_value) - .expect("should create CLValue") - .to_bytes() - .expect("should serialize CLValue") -} - -fn benchmark_deserialization(b: &mut Bencher, raw_value: T) { - let serialized_value = serialize_cl_value(raw_value); - b.iter(|| { - let cl_value: CLValue = bytesrepr::deserialize_from_slice(&serialized_value).unwrap(); - let _raw_value: T = cl_value.into_t().unwrap(); - }); -} - -fn serialize_cl_value_int32(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_I32)); -} - -fn deserialize_cl_value_int32(b: &mut Bencher) { - benchmark_deserialization(b, TEST_I32); -} - -fn serialize_cl_value_uint128(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U128)); -} - -fn deserialize_cl_value_uint128(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U128); -} - -fn serialize_cl_value_uint256(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U256)); -} - -fn deserialize_cl_value_uint256(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U256); -} - -fn serialize_cl_value_uint512(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_U512)); -} - -fn deserialize_cl_value_uint512(b: &mut Bencher) { - benchmark_deserialization(b, TEST_U512); -} - -fn serialize_cl_value_bytearray(b: &mut Bencher) { - b.iter_with_setup( - || { - let vec: Vec = (0..255).collect(); - Bytes::from(vec) - }, - serialize_cl_value, - ); -} - -fn deserialize_cl_value_bytearray(b: &mut Bencher) { - let vec = (0..255).collect::>(); - let bytes: Bytes = vec.into(); - benchmark_deserialization(b, bytes); -} - -fn serialize_cl_value_listint32(b: &mut Bencher) { - b.iter(|| serialize_cl_value((0..1024).collect::>())); -} - -fn deserialize_cl_value_listint32(b: &mut Bencher) { - benchmark_deserialization(b, (0..1024).collect::>()); -} - -fn serialize_cl_value_string(b: &mut Bencher) { - b.iter(|| serialize_cl_value(TEST_STR_1.to_string())); -} - -fn deserialize_cl_value_string(b: &mut Bencher) { - benchmark_deserialization(b, TEST_STR_1.to_string()); -} - -fn serialize_cl_value_liststring(b: &mut Bencher) { - b.iter(|| serialize_cl_value(vec![TEST_STR_1.to_string(), TEST_STR_2.to_string()])); -} - -fn deserialize_cl_value_liststring(b: &mut Bencher) { - benchmark_deserialization(b, vec![TEST_STR_1.to_string(), TEST_STR_2.to_string()]); -} - -fn serialize_cl_value_namedkey(b: &mut Bencher) { - b.iter(|| { - serialize_cl_value(( - TEST_STR_1.to_string(), - Key::Account(AccountHash::new([0xffu8; 32])), - )) - }); -} - -fn deserialize_cl_value_namedkey(b: &mut Bencher) { - benchmark_deserialization( - b, - ( - TEST_STR_1.to_string(), - Key::Account(AccountHash::new([0xffu8; 32])), - ), - ); -} - -fn serialize_u128(b: &mut Bencher) { - let num_u128 = U128::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u128))) -} - -fn deserialize_u128(b: &mut Bencher) { - let num_u128 = U128::default(); - let num_u128_bytes = num_u128.to_bytes().unwrap(); - - b.iter(|| U128::from_bytes(black_box(&num_u128_bytes))) -} - -fn serialize_u256(b: &mut Bencher) { - let num_u256 = U256::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u256))) -} - -fn deserialize_u256(b: &mut Bencher) { - let num_u256 = U256::default(); - let num_u256_bytes = num_u256.to_bytes().unwrap(); - - b.iter(|| U256::from_bytes(black_box(&num_u256_bytes))) -} 
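The pattern these benchmarks time is the `bytesrepr` serialize/deserialize round-trip; a minimal sketch follows, assuming the same `casper_types_ver_2_0` imports used at the top of this file and the usual `From<u64>` conversion for `U512`.

```rust
use casper_types_ver_2_0::{
    bytesrepr::{FromBytes, ToBytes},
    U512,
};

fn main() {
    // Serialize a value with ToBytes, then decode it back with FromBytes.
    let value = U512::from(123_456_789u64);
    let bytes = value.to_bytes().expect("should serialize");
    let (decoded, remainder) = U512::from_bytes(&bytes).expect("should deserialize");
    assert_eq!(value, decoded);
    assert!(remainder.is_empty(), "round-trip should consume all bytes");
}
```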
- -fn serialize_u512(b: &mut Bencher) { - let num_u512 = U512::default(); - b.iter(|| ToBytes::to_bytes(black_box(&num_u512))) -} - -fn deserialize_u512(b: &mut Bencher) { - let num_u512 = U512::default(); - let num_u512_bytes = num_u512.to_bytes().unwrap(); - - b.iter(|| U512::from_bytes(black_box(&num_u512_bytes))) -} - -fn serialize_contract(b: &mut Bencher) { - let contract = sample_contract(10, 10); - b.iter(|| ToBytes::to_bytes(black_box(&contract))); -} - -fn deserialize_contract(b: &mut Bencher) { - let contract = sample_contract(10, 10); - let contract_bytes = AddressableEntity::to_bytes(&contract).unwrap(); - b.iter(|| AddressableEntity::from_bytes(black_box(&contract_bytes)).unwrap()); -} - -fn sample_named_keys(len: u8) -> NamedKeys { - NamedKeys::from( - (0..len) - .map(|i| { - ( - format!("named-key-{}", i), - Key::Account(AccountHash::default()), - ) - }) - .collect::>(), - ) -} - -fn sample_contract(named_keys_len: u8, entry_points_len: u8) -> AddressableEntity { - let named_keys: NamedKeys = sample_named_keys(named_keys_len); - - let entry_points = { - let mut tmp = EntryPoints::new_with_default_entry_point(); - (1..entry_points_len).for_each(|i| { - let args = vec![ - Parameter::new("first", CLType::U32), - Parameter::new("Foo", CLType::U32), - ]; - let entry_point = EntryPoint::new( - format!("test-{}", i), - args, - casper_types_ver_2_0::CLType::U512, - EntryPointAccess::groups(&["Group 2"]), - EntryPointType::AddressableEntity, - ); - tmp.add_entry_point(entry_point); - }); - tmp - }; - - casper_types_ver_2_0::addressable_entity::AddressableEntity::new( - PackageHash::default(), - ByteCodeHash::default(), - named_keys, - entry_points, - ProtocolVersion::default(), - URef::default(), - AssociatedKeys::default(), - ActionThresholds::default(), - MessageTopics::default(), - ) -} - -fn contract_version_key_fn(i: u8) -> EntityVersionKey { - EntityVersionKey::new(i as u32, i as u32) -} - -fn contract_hash_fn(i: u8) -> AddressableEntityHash { - AddressableEntityHash::new([i; KEY_HASH_LENGTH]) -} - -fn sample_map(key_fn: FK, value_fn: FV, count: u8) -> BTreeMap -where - FK: Fn(u8) -> K, - FV: Fn(u8) -> V, -{ - (0..count) - .map(|i| { - let key = key_fn(i); - let value = value_fn(i); - (key, value) - }) - .collect() -} - -fn sample_set(fun: F, count: u8) -> BTreeSet -where - F: Fn(u8) -> K, -{ - (0..count).map(fun).collect() -} - -fn sample_group(i: u8) -> Group { - Group::new(format!("group-{}", i)) -} - -fn sample_uref(i: u8) -> URef { - URef::new([i; UREF_ADDR_LENGTH], AccessRights::all()) -} - -fn sample_contract_package( - contract_versions_len: u8, - disabled_versions_len: u8, - groups_len: u8, -) -> Package { - let access_key = URef::default(); - let versions = EntityVersions::from(sample_map( - contract_version_key_fn, - contract_hash_fn, - contract_versions_len, - )); - let disabled_versions = sample_set(contract_version_key_fn, disabled_versions_len); - let groups = Groups::from(sample_map( - sample_group, - |_| sample_set(sample_uref, 3), - groups_len, - )); - - Package::new( - access_key, - versions, - disabled_versions, - groups, - PackageStatus::Locked, - PackageKind::SmartContract, - ) -} - -fn serialize_contract_package(b: &mut Bencher) { - let contract = sample_contract_package(5, 1, 5); - b.iter(|| Package::to_bytes(black_box(&contract))); -} - -fn deserialize_contract_package(b: &mut Bencher) { - let contract_package = sample_contract_package(5, 1, 5); - let contract_bytes = Package::to_bytes(&contract_package).unwrap(); - b.iter(|| 
Package::from_bytes(black_box(&contract_bytes)).unwrap()); -} - -fn u32_to_pk(i: u32) -> PublicKey { - let mut sk_bytes = [0u8; 32]; - U256::from(i).to_big_endian(&mut sk_bytes); - let sk = SecretKey::ed25519_from_bytes(sk_bytes).unwrap(); - PublicKey::from(&sk) -} - -fn sample_delegators(delegators_len: u32) -> Vec { - (0..delegators_len) - .map(|i| { - let delegator_pk = u32_to_pk(i); - let staked_amount = U512::from_dec_str("123123123123123").unwrap(); - let bonding_purse = URef::default(); - let validator_pk = u32_to_pk(i); - Delegator::unlocked(delegator_pk, staked_amount, bonding_purse, validator_pk) - }) - .collect() -} - -fn sample_bid(delegators_len: u32) -> Bid { - let validator_public_key = PublicKey::System; - let bonding_purse = URef::default(); - let staked_amount = U512::from_dec_str("123123123123123").unwrap(); - let delegation_rate = 10u8; - let mut bid = Bid::unlocked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - ); - let new_delegators = sample_delegators(delegators_len); - - let curr_delegators = bid.delegators_mut(); - for delegator in new_delegators.into_iter() { - assert!(curr_delegators - .insert(delegator.delegator_public_key().clone(), delegator) - .is_none()); - } - bid -} - -fn serialize_bid(delegators_len: u32, b: &mut Bencher) { - let bid = sample_bid(delegators_len); - b.iter(|| Bid::to_bytes(black_box(&bid))); -} - -fn deserialize_bid(delegators_len: u32, b: &mut Bencher) { - let bid = sample_bid(delegators_len); - let bid_bytes = Bid::to_bytes(&bid).unwrap(); - b.iter(|| Bid::from_bytes(black_box(&bid_bytes))); -} - -fn sample_transfer() -> Transfer { - Transfer::new( - DeployHash::default(), - AccountHash::default(), - None, - URef::default(), - URef::default(), - U512::MAX, - U512::from_dec_str("123123123123").unwrap(), - Some(1u64), - ) -} - -fn serialize_transfer(b: &mut Bencher) { - let transfer = sample_transfer(); - b.iter(|| Transfer::to_bytes(&transfer)); -} - -fn deserialize_transfer(b: &mut Bencher) { - let transfer = sample_transfer(); - let transfer_bytes = transfer.to_bytes().unwrap(); - b.iter(|| Transfer::from_bytes(&transfer_bytes)); -} - -fn sample_deploy_info(transfer_len: u16) -> DeployInfo { - let transfers = (0..transfer_len) - .map(|i| { - let mut tmp = [0u8; TRANSFER_ADDR_LENGTH]; - U256::from(i).to_little_endian(&mut tmp); - TransferAddr::new(tmp) - }) - .collect::>(); - DeployInfo::new( - DeployHash::default(), - &transfers, - AccountHash::default(), - URef::default(), - U512::MAX, - ) -} - -fn serialize_deploy_info(b: &mut Bencher) { - let deploy_info = sample_deploy_info(1000); - b.iter(|| DeployInfo::to_bytes(&deploy_info)); -} - -fn deserialize_deploy_info(b: &mut Bencher) { - let deploy_info = sample_deploy_info(1000); - let deploy_bytes = deploy_info.to_bytes().unwrap(); - b.iter(|| DeployInfo::from_bytes(&deploy_bytes)); -} - -fn sample_era_info(delegators_len: u32) -> EraInfo { - let mut base = EraInfo::new(); - let delegations = (0..delegators_len).map(|i| { - let pk = u32_to_pk(i); - SeigniorageAllocation::delegator(pk.clone(), pk, U512::MAX) - }); - base.seigniorage_allocations_mut().extend(delegations); - base -} - -fn serialize_era_info(delegators_len: u32, b: &mut Bencher) { - let era_info = sample_era_info(delegators_len); - b.iter(|| EraInfo::to_bytes(&era_info)); -} - -fn deserialize_era_info(delegators_len: u32, b: &mut Bencher) { - let era_info = sample_era_info(delegators_len); - let era_info_bytes = era_info.to_bytes().unwrap(); - b.iter(|| 
EraInfo::from_bytes(&era_info_bytes)); -} - -fn bytesrepr_bench(c: &mut Criterion) { - c.bench_function("serialize_vector_of_i32s", serialize_vector_of_i32s); - c.bench_function("deserialize_vector_of_i32s", deserialize_vector_of_i32s); - c.bench_function("serialize_vector_of_u8", serialize_vector_of_u8); - c.bench_function("deserialize_vector_of_u8", deserialize_vector_of_u8); - c.bench_function("serialize_u8", serialize_u8); - c.bench_function("deserialize_u8", deserialize_u8); - c.bench_function("serialize_i32", serialize_i32); - c.bench_function("deserialize_i32", deserialize_i32); - c.bench_function("serialize_u64", serialize_u64); - c.bench_function("deserialize_u64", deserialize_u64); - c.bench_function("serialize_some_u64", serialize_some_u64); - c.bench_function("deserialize_some_u64", deserialize_some_u64); - c.bench_function("serialize_none_u64", serialize_none_u64); - c.bench_function("deserialize_ok_u64", deserialize_ok_u64); - c.bench_function( - "serialize_vector_of_vector_of_u8", - serialize_vector_of_vector_of_u8, - ); - c.bench_function( - "deserialize_vector_of_vector_of_u8", - deserialize_vector_of_vector_of_u8, - ); - c.bench_function("serialize_tree_map", serialize_tree_map); - c.bench_function("deserialize_treemap", deserialize_treemap); - c.bench_function("serialize_string", serialize_string); - c.bench_function("deserialize_string", deserialize_string); - c.bench_function("serialize_vec_of_string", serialize_vec_of_string); - c.bench_function("deserialize_vec_of_string", deserialize_vec_of_string); - c.bench_function("serialize_unit", serialize_unit); - c.bench_function("deserialize_unit", deserialize_unit); - c.bench_function("serialize_key_account", serialize_key_account); - c.bench_function("deserialize_key_account", deserialize_key_account); - c.bench_function("serialize_key_hash", serialize_key_hash); - c.bench_function("deserialize_key_hash", deserialize_key_hash); - c.bench_function("serialize_key_uref", serialize_key_uref); - c.bench_function("deserialize_key_uref", deserialize_key_uref); - c.bench_function("serialize_vec_of_keys", serialize_vec_of_keys); - c.bench_function("deserialize_vec_of_keys", deserialize_vec_of_keys); - c.bench_function("serialize_access_rights_read", serialize_access_rights_read); - c.bench_function( - "deserialize_access_rights_read", - deserialize_access_rights_read, - ); - c.bench_function( - "serialize_access_rights_write", - serialize_access_rights_write, - ); - c.bench_function( - "deserialize_access_rights_write", - deserialize_access_rights_write, - ); - c.bench_function("serialize_access_rights_add", serialize_access_rights_add); - c.bench_function( - "deserialize_access_rights_add", - deserialize_access_rights_add, - ); - c.bench_function( - "serialize_access_rights_read_add", - serialize_access_rights_read_add, - ); - c.bench_function( - "deserialize_access_rights_read_add", - deserialize_access_rights_read_add, - ); - c.bench_function( - "serialize_access_rights_read_write", - serialize_access_rights_read_write, - ); - c.bench_function( - "deserialize_access_rights_read_write", - deserialize_access_rights_read_write, - ); - c.bench_function( - "serialize_access_rights_add_write", - serialize_access_rights_add_write, - ); - c.bench_function( - "deserialize_access_rights_add_write", - deserialize_access_rights_add_write, - ); - c.bench_function("serialize_cl_value_int32", serialize_cl_value_int32); - c.bench_function("deserialize_cl_value_int32", deserialize_cl_value_int32); - 
c.bench_function("serialize_cl_value_uint128", serialize_cl_value_uint128); - c.bench_function("deserialize_cl_value_uint128", deserialize_cl_value_uint128); - c.bench_function("serialize_cl_value_uint256", serialize_cl_value_uint256); - c.bench_function("deserialize_cl_value_uint256", deserialize_cl_value_uint256); - c.bench_function("serialize_cl_value_uint512", serialize_cl_value_uint512); - c.bench_function("deserialize_cl_value_uint512", deserialize_cl_value_uint512); - c.bench_function("serialize_cl_value_bytearray", serialize_cl_value_bytearray); - c.bench_function( - "deserialize_cl_value_bytearray", - deserialize_cl_value_bytearray, - ); - c.bench_function("serialize_cl_value_listint32", serialize_cl_value_listint32); - c.bench_function( - "deserialize_cl_value_listint32", - deserialize_cl_value_listint32, - ); - c.bench_function("serialize_cl_value_string", serialize_cl_value_string); - c.bench_function("deserialize_cl_value_string", deserialize_cl_value_string); - c.bench_function( - "serialize_cl_value_liststring", - serialize_cl_value_liststring, - ); - c.bench_function( - "deserialize_cl_value_liststring", - deserialize_cl_value_liststring, - ); - c.bench_function("serialize_cl_value_namedkey", serialize_cl_value_namedkey); - c.bench_function( - "deserialize_cl_value_namedkey", - deserialize_cl_value_namedkey, - ); - c.bench_function("serialize_u128", serialize_u128); - c.bench_function("deserialize_u128", deserialize_u128); - c.bench_function("serialize_u256", serialize_u256); - c.bench_function("deserialize_u256", deserialize_u256); - c.bench_function("serialize_u512", serialize_u512); - c.bench_function("deserialize_u512", deserialize_u512); - // c.bench_function("bytesrepr::serialize_account", serialize_account); - // c.bench_function("bytesrepr::deserialize_account", deserialize_account); - c.bench_function("bytesrepr::serialize_contract", serialize_contract); - c.bench_function("bytesrepr::deserialize_contract", deserialize_contract); - c.bench_function( - "bytesrepr::serialize_contract_package", - serialize_contract_package, - ); - c.bench_function( - "bytesrepr::deserialize_contract_package", - deserialize_contract_package, - ); - c.bench_function("bytesrepr::serialize_bid_small", |b| serialize_bid(10, b)); - c.bench_function("bytesrepr::serialize_bid_medium", |b| serialize_bid(100, b)); - c.bench_function("bytesrepr::serialize_bid_big", |b| serialize_bid(1000, b)); - c.bench_function("bytesrepr::deserialize_bid_small", |b| { - deserialize_bid(10, b) - }); - c.bench_function("bytesrepr::deserialize_bid_medium", |b| { - deserialize_bid(100, b) - }); - c.bench_function("bytesrepr::deserialize_bid_big", |b| { - deserialize_bid(1000, b) - }); - c.bench_function("bytesrepr::serialize_transfer", serialize_transfer); - c.bench_function("bytesrepr::deserialize_transfer", deserialize_transfer); - c.bench_function("bytesrepr::serialize_deploy_info", serialize_deploy_info); - c.bench_function( - "bytesrepr::deserialize_deploy_info", - deserialize_deploy_info, - ); - c.bench_function("bytesrepr::serialize_era_info", |b| { - serialize_era_info(500, b) - }); - c.bench_function("bytesrepr::deserialize_era_info", |b| { - deserialize_era_info(500, b) - }); -} - -criterion_group!(benches, bytesrepr_bench); -criterion_main!(benches); diff --git a/casper_types_ver_2_0/src/access_rights.rs b/casper_types_ver_2_0/src/access_rights.rs deleted file mode 100644 index dd12ea68..00000000 --- a/casper_types_ver_2_0/src/access_rights.rs +++ /dev/null @@ -1,421 +0,0 @@ -// This allow was added so 
that bitflags! macro won't fail on clippy -#![allow(clippy::bad_bit_mask)] -use alloc::{ - collections::{btree_map::Entry, BTreeMap}, - vec::Vec, -}; -use core::fmt::{self, Display, Formatter}; - -use bitflags::bitflags; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{bytesrepr, AddressableEntityHash, URef, URefAddr}; - -/// The number of bytes in a serialized [`AccessRights`]. -pub const ACCESS_RIGHTS_SERIALIZED_LENGTH: usize = 1; - -bitflags! { - /// A struct which behaves like a set of bitflags to define access rights associated with a - /// [`URef`](crate::URef). - - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct AccessRights: u8 { - /// No permissions - const NONE = 0; - /// Permission to read the value under the associated `URef`. - const READ = 0b001; - /// Permission to write a value under the associated `URef`. - const WRITE = 0b010; - /// Permission to add to the value under the associated `URef`. - const ADD = 0b100; - /// Permission to read or add to the value under the associated `URef`. - const READ_ADD = Self::READ.bits() | Self::ADD.bits(); - /// Permission to read or write the value under the associated `URef`. - const READ_WRITE = Self::READ.bits() | Self::WRITE.bits(); - /// Permission to add to, or write the value under the associated `URef`. - const ADD_WRITE = Self::ADD.bits() | Self::WRITE.bits(); - /// Permission to read, add to, or write the value under the associated `URef`. - const READ_ADD_WRITE = Self::READ.bits() | Self::ADD.bits() | Self::WRITE.bits(); - } -} - -impl Default for AccessRights { - fn default() -> Self { - AccessRights::NONE - } -} - -impl AccessRights { - /// Returns `true` if the `READ` flag is set. - pub fn is_readable(self) -> bool { - self & AccessRights::READ == AccessRights::READ - } - - /// Returns `true` if the `WRITE` flag is set. - pub fn is_writeable(self) -> bool { - self & AccessRights::WRITE == AccessRights::WRITE - } - - /// Returns `true` if the `ADD` flag is set. - pub fn is_addable(self) -> bool { - self & AccessRights::ADD == AccessRights::ADD - } - - /// Returns `true` if no flags are set. 
- pub fn is_none(self) -> bool { - self == AccessRights::NONE - } -} - -impl Display for AccessRights { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - AccessRights::NONE => write!(f, "NONE"), - AccessRights::READ => write!(f, "READ"), - AccessRights::WRITE => write!(f, "WRITE"), - AccessRights::ADD => write!(f, "ADD"), - AccessRights::READ_ADD => write!(f, "READ_ADD"), - AccessRights::READ_WRITE => write!(f, "READ_WRITE"), - AccessRights::ADD_WRITE => write!(f, "ADD_WRITE"), - AccessRights::READ_ADD_WRITE => write!(f, "READ_ADD_WRITE"), - _ => write!(f, "UNKNOWN"), - } - } -} - -impl bytesrepr::ToBytes for AccessRights { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.bits().to_bytes() - } - - fn serialized_length(&self) -> usize { - ACCESS_RIGHTS_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.bits()); - Ok(()) - } -} - -impl bytesrepr::FromBytes for AccessRights { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (id, rem) = u8::from_bytes(bytes)?; - match AccessRights::from_bits(id) { - Some(rights) => Ok((rights, rem)), - None => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for AccessRights { - fn serialize(&self, serializer: S) -> Result { - self.bits().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for AccessRights { - fn deserialize>(deserializer: D) -> Result { - let bits = u8::deserialize(deserializer)?; - AccessRights::from_bits(bits).ok_or_else(|| SerdeError::custom("invalid bits")) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> AccessRights { - let mut result = AccessRights::NONE; - if rng.gen() { - result |= AccessRights::READ; - } - if rng.gen() { - result |= AccessRights::WRITE; - } - if rng.gen() { - result |= AccessRights::ADD; - } - result - } -} - -/// Used to indicate if a granted [`URef`] was already held by the context. -#[derive(Debug, PartialEq, Eq)] -pub enum GrantedAccess { - /// No new set of access rights were granted. - PreExisting, - /// A new set of access rights were granted. - Granted { - /// The address of the URef. - uref_addr: URefAddr, - /// The set of the newly granted access rights. - newly_granted_access_rights: AccessRights, - }, -} - -/// Access rights for a given runtime context. -#[derive(Debug, PartialEq, Eq)] -pub struct ContextAccessRights { - context_entity_hash: AddressableEntityHash, - access_rights: BTreeMap, -} - -impl ContextAccessRights { - /// Creates a new instance of access rights from an iterator of URefs merging any duplicates, - /// taking the union of their rights. - pub fn new>( - context_entity_hash: AddressableEntityHash, - uref_iter: T, - ) -> Self { - let mut context_access_rights = ContextAccessRights { - context_entity_hash, - access_rights: BTreeMap::new(), - }; - context_access_rights.do_extend(uref_iter); - context_access_rights - } - - /// Returns the current context key. - pub fn context_key(&self) -> AddressableEntityHash { - self.context_entity_hash - } - - /// Extends the current access rights from a given set of URefs. - pub fn extend(&mut self, urefs: &[URef]) { - self.do_extend(urefs.iter().copied()) - } - - /// Extends the current access rights from a given set of URefs. 
- fn do_extend>(&mut self, uref_iter: T) { - for uref in uref_iter { - match self.access_rights.entry(uref.addr()) { - Entry::Occupied(rights) => { - *rights.into_mut() = rights.get().union(uref.access_rights()); - } - Entry::Vacant(rights) => { - rights.insert(uref.access_rights()); - } - } - } - } - - /// Checks whether given uref has enough access rights. - pub fn has_access_rights_to_uref(&self, uref: &URef) -> bool { - if let Some(known_rights) = self.access_rights.get(&uref.addr()) { - let rights_to_check = uref.access_rights(); - known_rights.contains(rights_to_check) - } else { - // URef is not known - false - } - } - - /// Grants access to a [`URef`]; unless access was pre-existing. - pub fn grant_access(&mut self, uref: URef) -> GrantedAccess { - match self.access_rights.entry(uref.addr()) { - Entry::Occupied(existing_rights) => { - let newly_granted_access_rights = - uref.access_rights().difference(*existing_rights.get()); - *existing_rights.into_mut() = existing_rights.get().union(uref.access_rights()); - if newly_granted_access_rights.is_none() { - GrantedAccess::PreExisting - } else { - GrantedAccess::Granted { - uref_addr: uref.addr(), - newly_granted_access_rights, - } - } - } - Entry::Vacant(rights) => { - rights.insert(uref.access_rights()); - GrantedAccess::Granted { - uref_addr: uref.addr(), - newly_granted_access_rights: uref.access_rights(), - } - } - } - } - - /// Remove access for a given `URef`. - pub fn remove_access(&mut self, uref_addr: URefAddr, access_rights: AccessRights) { - if let Some(current_access_rights) = self.access_rights.get_mut(&uref_addr) { - current_access_rights.remove(access_rights) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::UREF_ADDR_LENGTH; - - const ENTITY_HASH: AddressableEntityHash = AddressableEntityHash::new([1u8; 32]); - const UREF_ADDRESS: [u8; UREF_ADDR_LENGTH] = [1; UREF_ADDR_LENGTH]; - const UREF_NO_PERMISSIONS: URef = URef::new(UREF_ADDRESS, AccessRights::empty()); - const UREF_READ: URef = URef::new(UREF_ADDRESS, AccessRights::READ); - const UREF_ADD: URef = URef::new(UREF_ADDRESS, AccessRights::ADD); - const UREF_WRITE: URef = URef::new(UREF_ADDRESS, AccessRights::WRITE); - const UREF_READ_ADD: URef = URef::new(UREF_ADDRESS, AccessRights::READ_ADD); - const UREF_READ_ADD_WRITE: URef = URef::new(UREF_ADDRESS, AccessRights::READ_ADD_WRITE); - - fn test_readable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_readable(), is_true) - } - - #[test] - fn test_is_readable() { - test_readable(AccessRights::READ, true); - test_readable(AccessRights::READ_ADD, true); - test_readable(AccessRights::READ_WRITE, true); - test_readable(AccessRights::READ_ADD_WRITE, true); - test_readable(AccessRights::ADD, false); - test_readable(AccessRights::ADD_WRITE, false); - test_readable(AccessRights::WRITE, false); - } - - fn test_writable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_writeable(), is_true) - } - - #[test] - fn test_is_writable() { - test_writable(AccessRights::WRITE, true); - test_writable(AccessRights::READ_WRITE, true); - test_writable(AccessRights::ADD_WRITE, true); - test_writable(AccessRights::READ, false); - test_writable(AccessRights::ADD, false); - test_writable(AccessRights::READ_ADD, false); - test_writable(AccessRights::READ_ADD_WRITE, true); - } - - fn test_addable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_addable(), is_true) - } - - #[test] - fn test_is_addable() { - test_addable(AccessRights::ADD, true); - test_addable(AccessRights::READ_ADD, true); - 
test_addable(AccessRights::READ_WRITE, false); - test_addable(AccessRights::ADD_WRITE, true); - test_addable(AccessRights::READ, false); - test_addable(AccessRights::WRITE, false); - test_addable(AccessRights::READ_ADD_WRITE, true); - } - - #[test] - fn should_check_has_access_rights_to_uref() { - let context_rights = ContextAccessRights::new(ENTITY_HASH, vec![UREF_READ_ADD]); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD)); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ)); - assert!(context_rights.has_access_rights_to_uref(&UREF_ADD)); - assert!(context_rights.has_access_rights_to_uref(&UREF_NO_PERMISSIONS)); - } - - #[test] - fn should_check_does_not_have_access_rights_to_uref() { - let context_rights = ContextAccessRights::new(ENTITY_HASH, vec![UREF_READ_ADD]); - assert!(!context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - assert!(!context_rights - .has_access_rights_to_uref(&URef::new([2; UREF_ADDR_LENGTH], AccessRights::empty()))); - } - - #[test] - fn should_extend_access_rights() { - // Start with uref with no permissions. - let mut context_rights = ContextAccessRights::new(ENTITY_HASH, vec![UREF_NO_PERMISSIONS]); - let mut expected_rights = BTreeMap::new(); - expected_rights.insert(UREF_ADDRESS, AccessRights::empty()); - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a READ_ADD: should merge to single READ_ADD. - context_rights.extend(&[UREF_READ_ADD]); - *expected_rights.get_mut(&UREF_ADDRESS).unwrap() = AccessRights::READ_ADD; - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a READ: should have no observable effect. - context_rights.extend(&[UREF_READ]); - assert_eq!(context_rights.access_rights, expected_rights); - - // Extend with a WRITE: should merge to single READ_ADD_WRITE. - context_rights.extend(&[UREF_WRITE]); - *expected_rights.get_mut(&UREF_ADDRESS).unwrap() = AccessRights::READ_ADD_WRITE; - assert_eq!(context_rights.access_rights, expected_rights); - } - - #[test] - fn should_perform_union_of_access_rights_in_new() { - let context_rights = - ContextAccessRights::new(ENTITY_HASH, vec![UREF_NO_PERMISSIONS, UREF_READ, UREF_ADD]); - - // Expect the three discrete URefs' rights to be unioned into READ_ADD. 
- let mut expected_rights = BTreeMap::new(); - expected_rights.insert(UREF_ADDRESS, AccessRights::READ_ADD); - assert_eq!(context_rights.access_rights, expected_rights); - } - - #[test] - fn should_grant_access_rights() { - let mut context_rights = ContextAccessRights::new(ENTITY_HASH, vec![UREF_READ_ADD]); - let granted_access = context_rights.grant_access(UREF_READ); - assert_eq!(granted_access, GrantedAccess::PreExisting); - let granted_access = context_rights.grant_access(UREF_READ_ADD_WRITE); - assert_eq!( - granted_access, - GrantedAccess::Granted { - uref_addr: UREF_ADDRESS, - newly_granted_access_rights: AccessRights::WRITE - } - ); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - let new_uref = URef::new([3; 32], AccessRights::all()); - let granted_access = context_rights.grant_access(new_uref); - assert_eq!( - granted_access, - GrantedAccess::Granted { - uref_addr: new_uref.addr(), - newly_granted_access_rights: AccessRights::all() - } - ); - assert!(context_rights.has_access_rights_to_uref(&new_uref)); - } - - #[test] - fn should_remove_access_rights() { - let mut context_rights = ContextAccessRights::new(ENTITY_HASH, vec![UREF_READ_ADD_WRITE]); - assert!(context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE)); - - // Strip write access from the context rights. - context_rights.remove_access(UREF_ADDRESS, AccessRights::WRITE); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE), - "Write access should have been removed" - ); - - // Strip the access again to ensure that the bit is not flipped back. - context_rights.remove_access(UREF_ADDRESS, AccessRights::WRITE); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD_WRITE), - "Write access should not have been granted back" - ); - assert!( - context_rights.has_access_rights_to_uref(&UREF_READ_ADD), - "Read and add access should be preserved." - ); - - // Strip both read and add access from the context rights. - context_rights.remove_access(UREF_ADDRESS, AccessRights::READ_ADD); - assert!( - !context_rights.has_access_rights_to_uref(&UREF_READ_ADD), - "Read and add access should have been removed" - ); - assert!( - context_rights.has_access_rights_to_uref(&UREF_NO_PERMISSIONS), - "The access rights should be empty" - ); - } -} diff --git a/casper_types_ver_2_0/src/account.rs b/casper_types_ver_2_0/src/account.rs deleted file mode 100644 index 51641191..00000000 --- a/casper_types_ver_2_0/src/account.rs +++ /dev/null @@ -1,857 +0,0 @@ -//! Contains types and constants associated with user accounts. 
- -mod account_hash; -pub mod action_thresholds; -mod action_type; -pub mod associated_keys; -mod error; -mod weight; - -use serde::{Deserialize, Serialize}; - -use alloc::{collections::BTreeSet, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -pub use self::{ - account_hash::{AccountHash, ACCOUNT_HASH_FORMATTED_STRING_PREFIX, ACCOUNT_HASH_LENGTH}, - action_thresholds::ActionThresholds, - action_type::ActionType, - associated_keys::AssociatedKeys, - error::FromStrError, - weight::Weight, -}; - -use crate::{ - addressable_entity::{ - AddKeyFailure, NamedKeys, RemoveKeyFailure, SetThresholdFailure, UpdateKeyFailure, - }, - bytesrepr::{self, FromBytes, ToBytes}, - crypto, AccessRights, Key, URef, BLAKE2B_DIGEST_LENGTH, -}; -#[cfg(feature = "json-schema")] -use crate::{PublicKey, SecretKey}; - -#[cfg(feature = "json-schema")] -static ACCOUNT: Lazy = Lazy::new(|| { - let secret_key = SecretKey::ed25519_from_bytes([0; 32]).unwrap(); - let account_hash = PublicKey::from(&secret_key).to_account_hash(); - let main_purse = URef::from_formatted_str( - "uref-09480c3248ef76b603d386f3f4f8a5f87f597d4eaffd475433f861af187ab5db-007", - ) - .unwrap(); - let mut named_keys = NamedKeys::new(); - named_keys.insert("main_purse".to_string(), Key::URef(main_purse)); - let weight = Weight::new(1); - let associated_keys = AssociatedKeys::new(account_hash, weight); - let action_thresholds = ActionThresholds::new(weight, weight).unwrap(); - Account { - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - } -}); - -/// Represents an Account in the global state. -#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Account { - account_hash: AccountHash, - named_keys: NamedKeys, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, -} - -impl Account { - /// Creates a new account. - pub fn new( - account_hash: AccountHash, - named_keys: NamedKeys, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, - ) -> Self { - Account { - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - } - } - - /// An Account constructor with presets for associated_keys and action_thresholds. - /// - /// An account created with this method is valid and can be used as the target of a transaction. - /// It will be created with an [`AssociatedKeys`] with a [`Weight`] of 1, and a default - /// [`ActionThresholds`]. - pub fn create(account: AccountHash, named_keys: NamedKeys, main_purse: URef) -> Self { - let associated_keys = AssociatedKeys::new(account, Weight::new(1)); - - let action_thresholds: ActionThresholds = Default::default(); - Account::new( - account, - named_keys, - main_purse, - associated_keys, - action_thresholds, - ) - } - - /// Appends named keys to an account's named_keys field. - pub fn named_keys_append(&mut self, keys: NamedKeys) { - self.named_keys.append(keys); - } - - /// Returns named keys. - pub fn named_keys(&self) -> &NamedKeys { - &self.named_keys - } - - /// Removes the key under the given name from named keys. - pub fn remove_named_key(&mut self, name: &str) -> Option { - self.named_keys.remove(name) - } - - /// Returns account hash. 
- pub fn account_hash(&self) -> AccountHash { - self.account_hash - } - - /// Returns main purse. - pub fn main_purse(&self) -> URef { - self.main_purse - } - - /// Returns an [`AccessRights::ADD`]-only version of the main purse's [`URef`]. - pub fn main_purse_add_only(&self) -> URef { - URef::new(self.main_purse.addr(), AccessRights::ADD) - } - - /// Returns associated keys. - pub fn associated_keys(&self) -> &AssociatedKeys { - &self.associated_keys - } - - /// Returns action thresholds. - pub fn action_thresholds(&self) -> &ActionThresholds { - &self.action_thresholds - } - - /// Adds an associated key to an account. - pub fn add_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), AddKeyFailure> { - self.associated_keys.add_key(account_hash, weight) - } - - /// Checks if removing given key would properly satisfy thresholds. - fn can_remove_key(&self, account_hash: AccountHash) -> bool { - let total_weight_without = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Returns true if the total weight calculated without given public key would be greater or - // equal to all of the thresholds. - total_weight_without >= *self.action_thresholds().deployment() - && total_weight_without >= *self.action_thresholds().key_management() - } - - /// Checks if adding a weight to a sum of all weights excluding the given key would make the - /// resulting value to fall below any of the thresholds on account. - fn can_update_key(&self, account_hash: AccountHash, weight: Weight) -> bool { - // Calculates total weight of all keys excluding the given key - let total_weight = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Safely calculate new weight by adding the updated weight - let new_weight = total_weight.value().saturating_add(weight.value()); - - // Returns true if the new weight would be greater or equal to all of - // the thresholds. - new_weight >= self.action_thresholds().deployment().value() - && new_weight >= self.action_thresholds().key_management().value() - } - - /// Removes an associated key from an account. - /// - /// Verifies that removing the key will not cause the remaining weight to fall below any action - /// thresholds. - pub fn remove_associated_key( - &mut self, - account_hash: AccountHash, - ) -> Result<(), RemoveKeyFailure> { - if self.associated_keys.contains_key(&account_hash) { - // Check if removing this weight would fall below thresholds - if !self.can_remove_key(account_hash) { - return Err(RemoveKeyFailure::ThresholdViolation); - } - } - self.associated_keys.remove_key(&account_hash) - } - - /// Updates an associated key. - /// - /// Returns an error if the update would result in a violation of the key management thresholds. - pub fn update_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), UpdateKeyFailure> { - if let Some(current_weight) = self.associated_keys.get(&account_hash) { - if weight < *current_weight { - // New weight is smaller than current weight - if !self.can_update_key(account_hash, weight) { - return Err(UpdateKeyFailure::ThresholdViolation); - } - } - } - self.associated_keys.update_key(account_hash, weight) - } - - /// Sets a new action threshold for a given action type for the account. - /// - /// Returns an error if the new action threshold weight is greater than the total weight of the - /// account's associated keys. 
- pub fn set_action_threshold( - &mut self, - action_type: ActionType, - weight: Weight, - ) -> Result<(), SetThresholdFailure> { - // Verify if new threshold weight exceeds total weight of all associated - // keys. - self.can_set_threshold(weight)?; - // Set new weight for given action - self.action_thresholds.set_threshold(action_type, weight) - } - - /// Verifies if user can set action threshold. - pub fn can_set_threshold(&self, new_threshold: Weight) -> Result<(), SetThresholdFailure> { - let total_weight = self.associated_keys.total_keys_weight(); - if new_threshold > total_weight { - return Err(SetThresholdFailure::InsufficientTotalWeight); - } - Ok(()) - } - - /// Checks whether all authorization keys are associated with this account. - pub fn can_authorize(&self, authorization_keys: &BTreeSet) -> bool { - !authorization_keys.is_empty() - && authorization_keys - .iter() - .all(|e| self.associated_keys.contains_key(e)) - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to deploy threshold. - pub fn can_deploy_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().deployment() - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to key management threshold. - pub fn can_manage_keys_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().key_management() - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ACCOUNT - } -} - -impl ToBytes for Account { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.account_hash().write_bytes(&mut result)?; - self.named_keys().write_bytes(&mut result)?; - self.main_purse.write_bytes(&mut result)?; - self.associated_keys().write_bytes(&mut result)?; - self.action_thresholds().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.account_hash.serialized_length() - + self.named_keys.serialized_length() - + self.main_purse.serialized_length() - + self.associated_keys.serialized_length() - + self.action_thresholds.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.account_hash().write_bytes(writer)?; - self.named_keys().write_bytes(writer)?; - self.main_purse().write_bytes(writer)?; - self.associated_keys().write_bytes(writer)?; - self.action_thresholds().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Account { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (account_hash, rem) = AccountHash::from_bytes(bytes)?; - let (named_keys, rem) = NamedKeys::from_bytes(rem)?; - let (main_purse, rem) = URef::from_bytes(rem)?; - let (associated_keys, rem) = AssociatedKeys::from_bytes(rem)?; - let (action_thresholds, rem) = ActionThresholds::from_bytes(rem)?; - Ok(( - Account { - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - }, - rem, - )) - } -} - -#[doc(hidden)] -#[deprecated( - since = "1.4.4", - note = "function moved to casper_types_ver_2_0::crypto::blake2b" -)] -pub fn blake2b>(data: T) -> [u8; BLAKE2B_DIGEST_LENGTH] { - crypto::blake2b(data) -} - 
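The `can_authorize`/`can_deploy_with` checks and the `ToBytes`/`FromBytes` impls above are the two halves most callers touch: signature-weight validation and `bytesrepr` round-tripping. A short round-trip sketch, with crate and module paths assumed as before:

```rust
use casper_types_ver_2_0::{
    account::{Account, AccountHash},
    addressable_entity::NamedKeys,
    bytesrepr::{FromBytes, ToBytes},
    AccessRights, URef,
};

fn main() {
    let account = Account::create(
        AccountHash::new([7u8; 32]),
        NamedKeys::new(),
        URef::new([0u8; 32], AccessRights::READ_ADD_WRITE),
    );

    // Fields are written in declaration order: hash, named keys, purse, keys, thresholds.
    let bytes = account.to_bytes().expect("serialization should succeed");
    assert_eq!(bytes.len(), account.serialized_length());

    // Deserialization returns the value plus any unconsumed remainder.
    let (parsed, remainder) = Account::from_bytes(&bytes).expect("deserialization should succeed");
    assert_eq!(parsed, account);
    assert!(remainder.is_empty());
}
```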
-#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use crate::{ - account::{associated_keys::gens::account_associated_keys_arb, Account, Weight}, - gens::{account_hash_arb, named_keys_arb, uref_arb}, - }; - - use super::action_thresholds::gens::account_action_thresholds_arb; - - prop_compose! { - pub fn account_arb()( - account_hash in account_hash_arb(), - urefs in named_keys_arb(3), - purse in uref_arb(), - thresholds in account_action_thresholds_arb(), - mut associated_keys in account_associated_keys_arb(), - ) -> Account { - associated_keys.add_key(account_hash, Weight::new(1)).unwrap(); - Account::new( - account_hash, - urefs, - purse, - associated_keys, - thresholds, - ) - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ - account::{ - Account, AccountHash, ActionThresholds, ActionType, AssociatedKeys, RemoveKeyFailure, - UpdateKeyFailure, Weight, - }, - addressable_entity::{NamedKeys, TryFromIntError}, - AccessRights, URef, - }; - use std::{collections::BTreeSet, convert::TryFrom, iter::FromIterator, vec::Vec}; - - use super::*; - - #[test] - fn account_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let account_hash = AccountHash::try_from(&bytes[..]).expect( - "should create account -hash", - ); - assert_eq!(&bytes, &account_hash.as_bytes()); - } - - #[test] - fn account_hash_from_slice_too_small() { - let _account_hash = - AccountHash::try_from(&[0u8; 31][..]).expect_err("should not create account hash"); - } - - #[test] - fn account_hash_from_slice_too_big() { - let _account_hash = - AccountHash::try_from(&[0u8; 33][..]).expect_err("should not create account hash"); - } - - #[test] - fn try_from_i32_for_set_threshold_failure() { - let max_valid_value_for_variant = SetThresholdFailure::InsufficientTotalWeight as i32; - assert_eq!( - Err(TryFromIntError(())), - SetThresholdFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `SetThresholdFailure::try_from` for a new variant of \ - `SetThresholdFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn try_from_i32_for_add_key_failure() { - let max_valid_value_for_variant = AddKeyFailure::PermissionDenied as i32; - assert_eq!( - Err(TryFromIntError(())), - AddKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `AddKeyFailure::try_from` for a new variant of \ - `AddKeyFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn try_from_i32_for_remove_key_failure() { - let max_valid_value_for_variant = RemoveKeyFailure::ThresholdViolation as i32; - assert_eq!( - Err(TryFromIntError(())), - RemoveKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `RemoveKeyFailure::try_from` for a new variant of \ - `RemoveKeyFailure`, or `max_valid_value_for_variant` in this test?" - ); - } - - #[test] - fn try_from_i32_for_update_key_failure() { - let max_valid_value_for_variant = UpdateKeyFailure::ThresholdViolation as i32; - assert_eq!( - Err(TryFromIntError(())), - UpdateKeyFailure::try_from(max_valid_value_for_variant + 1), - "Did you forget to update `UpdateKeyFailure::try_from` for a new variant of \ - `UpdateKeyFailure`, or `max_valid_value_for_variant` in this test?" 
- ); - } - - #[test] - fn account_hash_from_str() { - let account_hash = AccountHash([3; 32]); - let encoded = account_hash.to_formatted_string(); - let decoded = AccountHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(account_hash, decoded); - - let invalid_prefix = - "accounthash-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "account-hash0000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "account-hash-00000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "account-hash-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(AccountHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "account-hash-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(AccountHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn account_hash_serde_roundtrip() { - let account_hash = AccountHash([255; 32]); - let serialized = bincode::serialize(&account_hash).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(account_hash, decoded); - } - - #[test] - fn account_hash_json_roundtrip() { - let account_hash = AccountHash([255; 32]); - let json_string = serde_json::to_string_pretty(&account_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(account_hash, decoded); - } - - #[test] - fn associated_keys_can_authorize_keys() { - let key_1 = AccountHash::new([0; 32]); - let key_2 = AccountHash::new([1; 32]); - let key_3 = AccountHash::new([2; 32]); - let mut keys = AssociatedKeys::default(); - - keys.add_key(key_2, Weight::new(2)) - .expect("should add key_1"); - keys.add_key(key_1, Weight::new(1)) - .expect("should add key_1"); - keys.add_key(key_3, Weight::new(3)) - .expect("should add key_1"); - - let account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_3, key_2, key_1]))); - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1, key_3, key_2]))); - - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1, key_2]))); - assert!(account.can_authorize(&BTreeSet::from_iter(vec![key_1]))); - - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - key_1, - key_2, - AccountHash::new([42; 32]) - ]))); - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - AccountHash::new([42; 32]), - key_1, - key_2 - ]))); - assert!(!account.can_authorize(&BTreeSet::from_iter(vec![ - AccountHash::new([43; 32]), - AccountHash::new([44; 32]), - AccountHash::new([42; 32]) - ]))); - assert!(!account.can_authorize(&BTreeSet::new())); - } - - #[test] - fn account_can_deploy_with() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(11)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(11)) - .expect("should add key 3"); - res - }; - let 
account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - // sum: 22, required 33 - can't deploy - assert!(!account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 33, required 33 - can deploy - assert!(account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 34, required 33 - can deploy - assert!(account.can_deploy_with(&BTreeSet::from_iter(vec![ - AccountHash::new([2u8; 32]), - AccountHash::new([1u8; 32]), - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - ]))); - } - - #[test] - fn account_can_manage_keys_with() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(11)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(11)) - .expect("should add key 3"); - res - }; - let account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(11), Weight::new(33)) - .expect("should create thresholds"), - ); - - // sum: 22, required 33 - can't manage - assert!(!account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 33, required 33 - can manage - assert!(account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - AccountHash::new([2u8; 32]), - ]))); - - // sum: 34, required 33 - can manage - assert!(account.can_manage_keys_with(&BTreeSet::from_iter(vec![ - AccountHash::new([2u8; 32]), - AccountHash::new([1u8; 32]), - AccountHash::new([4u8; 32]), - AccountHash::new([3u8; 32]), - ]))); - } - - #[test] - fn set_action_threshold_higher_than_total_weight() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - res.add_key(key_2, Weight::new(3)) - .expect("should add key 2"); - res.add_key(key_3, Weight::new(4)) - .expect("should add key 3"); - res - }; - let mut account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(33), Weight::new(48)) - .expect("should create thresholds"), - ); - - assert_eq!( - account - .set_action_threshold(ActionType::Deployment, Weight::new(1 + 2 + 3 + 4 + 1)) - .unwrap_err(), - SetThresholdFailure::InsufficientTotalWeight, - ); - assert_eq!( - account - .set_action_threshold(ActionType::Deployment, Weight::new(1 + 2 + 3 + 4 + 245)) - .unwrap_err(), - SetThresholdFailure::InsufficientTotalWeight, - ) - } - - #[test] - fn remove_key_would_violate_action_thresholds() { - let identity_key = AccountHash::new([1u8; 32]); - let 
key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - res.add_key(key_2, Weight::new(3)) - .expect("should add key 2"); - res.add_key(key_3, Weight::new(4)) - .expect("should add key 3"); - res - }; - let mut account = Account::new( - AccountHash::new([0u8; 32]), - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(Weight::new(1 + 2 + 3 + 4), Weight::new(1 + 2 + 3 + 4 + 5)) - .expect("should create thresholds"), - ); - - assert_eq!( - account.remove_associated_key(key_3).unwrap_err(), - RemoveKeyFailure::ThresholdViolation, - ) - } - - #[test] - fn updating_key_would_violate_action_thresholds() { - let identity_key = AccountHash::new([1u8; 32]); - let identity_key_weight = Weight::new(1); - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(2); - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(3); - let key_3 = AccountHash::new([4u8; 32]); - let key_3_weight = Weight::new(4); - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - res.add_key(key_1, key_1_weight).expect("should add key 1"); - res.add_key(key_2, key_2_weight).expect("should add key 2"); - res.add_key(key_3, key_3_weight).expect("should add key 3"); - // 1 + 2 + 3 + 4 - res - }; - - let deployment_threshold = Weight::new( - identity_key_weight.value() - + key_1_weight.value() - + key_2_weight.value() - + key_3_weight.value(), - ); - let key_management_threshold = Weight::new(deployment_threshold.value() + 1); - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - // deploy: 33 (3*11) - ActionThresholds::new(deployment_threshold, key_management_threshold) - .expect("should create thresholds"), - ); - - // Decreases by 3 - assert_eq!( - account - .clone() - .update_associated_key(key_3, Weight::new(1)) - .unwrap_err(), - UpdateKeyFailure::ThresholdViolation, - ); - - // increase total weight (12) - account - .update_associated_key(identity_key, Weight::new(3)) - .unwrap(); - - // variant a) decrease total weight by 1 (total 11) - account - .clone() - .update_associated_key(key_3, Weight::new(3)) - .unwrap(); - // variant b) decrease total weight by 3 (total 9) - fail - assert_eq!( - account - .update_associated_key(key_3, Weight::new(1)) - .unwrap_err(), - UpdateKeyFailure::ThresholdViolation - ); - } - - #[test] - fn overflowing_should_allow_removal() { - let identity_key = AccountHash::new([42; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - - let associated_keys = { - // Identity - let mut res = AssociatedKeys::new(identity_key, Weight::new(1)); - - // Spare key - res.add_key(key_1, Weight::new(2)) - .expect("should add key 1"); - // Big key - res.add_key(key_2, Weight::new(255)) - .expect("should add key 2"); - - res - }; - - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - ActionThresholds::new(Weight::new(1), Weight::new(254)) - .expect("should create thresholds"), - ); - - account.remove_associated_key(key_1).expect("should work") - } - - #[test] - fn overflowing_should_allow_updating() { - 
let identity_key = AccountHash::new([1; 32]); - let identity_key_weight = Weight::new(1); - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(3); - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(255); - let deployment_threshold = Weight::new(1); - let key_management_threshold = Weight::new(254); - - let associated_keys = { - // Identity - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - - // Spare key - res.add_key(key_1, key_1_weight).expect("should add key 1"); - // Big key - res.add_key(key_2, key_2_weight).expect("should add key 2"); - - res - }; - - let mut account = Account::new( - identity_key, - NamedKeys::new(), - URef::new([0u8; 32], AccessRights::READ_ADD_WRITE), - associated_keys, - ActionThresholds::new(deployment_threshold, key_management_threshold) - .expect("should create thresholds"), - ); - - // decrease so total weight would be changed from 1 + 3 + 255 to 1 + 1 + 255 - account - .update_associated_key(key_1, Weight::new(1)) - .expect("should work"); - } -} - -#[cfg(test)] -mod proptests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::*; - - proptest! { - #[test] - fn test_value_account(acct in gens::account_arb()) { - bytesrepr::test_serialization_roundtrip(&acct); - } - } -} diff --git a/casper_types_ver_2_0/src/account/account_hash.rs b/casper_types_ver_2_0/src/account/account_hash.rs deleted file mode 100644 index 1e4ff6d1..00000000 --- a/casper_types_ver_2_0/src/account/account_hash.rs +++ /dev/null @@ -1,212 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::{ - convert::{From, TryFrom}, - fmt::{Debug, Display, Formatter}, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - addressable_entity::FromStrError, - bytesrepr::{Error, FromBytes, ToBytes}, - checksummed_hex, crypto, CLType, CLTyped, PublicKey, BLAKE2B_DIGEST_LENGTH, -}; - -/// The length in bytes of a [`AccountHash`]. -pub const ACCOUNT_HASH_LENGTH: usize = 32; -/// The prefix applied to the hex-encoded `AccountHash` to produce a formatted string -/// representation. -pub const ACCOUNT_HASH_FORMATTED_STRING_PREFIX: &str = "account-hash-"; - -/// A newtype wrapping an array which contains the raw bytes of -/// the AccountHash, a hash of Public Key and Algorithm -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Account hash as a formatted string.") -)] -pub struct AccountHash( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] - pub [u8; ACCOUNT_HASH_LENGTH], -); - -impl AccountHash { - /// Constructs a new `AccountHash` instance from the raw bytes of an Public Key Account Hash. - pub const fn new(value: [u8; ACCOUNT_HASH_LENGTH]) -> AccountHash { - AccountHash(value) - } - - /// Returns the raw bytes of the account hash as an array. - pub fn value(&self) -> [u8; ACCOUNT_HASH_LENGTH] { - self.0 - } - - /// Returns the raw bytes of the account hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `AccountHash` for users getting and putting. 
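`AccountHash` is a plain 32-byte newtype; the two formatting helpers that follow (`to_formatted_string` and `from_formatted_str`) translate it to and from the `account-hash-<hex>` form used in JSON output and on the command line. A small round-trip sketch under the same path assumptions:

```rust
use casper_types_ver_2_0::account::{AccountHash, ACCOUNT_HASH_FORMATTED_STRING_PREFIX};

fn main() {
    let hash = AccountHash::new([42u8; 32]);

    // "account-hash-" followed by the lower-case hex of the 32 raw bytes.
    let formatted = hash.to_formatted_string();
    assert!(formatted.starts_with(ACCOUNT_HASH_FORMATTED_STRING_PREFIX));
    assert_eq!(formatted.len(), ACCOUNT_HASH_FORMATTED_STRING_PREFIX.len() + 64);

    // Parsing accepts exactly that shape; a wrong prefix, length or non-hex digit is an error.
    let parsed = AccountHash::from_formatted_str(&formatted).expect("round-trip should parse");
    assert_eq!(parsed, hash);
}
```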
- pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - ACCOUNT_HASH_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into an `AccountHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(ACCOUNT_HASH_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(AccountHash(bytes)) - } - - /// Parses a `PublicKey` and outputs the corresponding account hash. - pub fn from_public_key( - public_key: &PublicKey, - blake2b_hash_fn: impl Fn(Vec) -> [u8; BLAKE2B_DIGEST_LENGTH], - ) -> Self { - const SYSTEM_LOWERCASE: &str = "system"; - const ED25519_LOWERCASE: &str = "ed25519"; - const SECP256K1_LOWERCASE: &str = "secp256k1"; - - let algorithm_name = match public_key { - PublicKey::System => SYSTEM_LOWERCASE, - PublicKey::Ed25519(_) => ED25519_LOWERCASE, - PublicKey::Secp256k1(_) => SECP256K1_LOWERCASE, - }; - let public_key_bytes: Vec = public_key.into(); - - // Prepare preimage based on the public key parameters. - let preimage = { - let mut data = Vec::with_capacity(algorithm_name.len() + public_key_bytes.len() + 1); - data.extend(algorithm_name.as_bytes()); - data.push(0); - data.extend(public_key_bytes); - data - }; - // Hash the preimage data using blake2b256 and return it. - let digest = blake2b_hash_fn(preimage); - Self::new(digest) - } -} - -impl Serialize for AccountHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for AccountHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - AccountHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; ACCOUNT_HASH_LENGTH]>::deserialize(deserializer)?; - Ok(AccountHash(bytes)) - } - } -} - -impl TryFrom<&[u8]> for AccountHash { - type Error = TryFromSliceForAccountHashError; - - fn try_from(bytes: &[u8]) -> Result { - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(bytes) - .map(AccountHash::new) - .map_err(|_| TryFromSliceForAccountHashError(())) - } -} - -impl TryFrom<&alloc::vec::Vec> for AccountHash { - type Error = TryFromSliceForAccountHashError; - - fn try_from(bytes: &Vec) -> Result { - <[u8; ACCOUNT_HASH_LENGTH]>::try_from(bytes as &[u8]) - .map(AccountHash::new) - .map_err(|_| TryFromSliceForAccountHashError(())) - } -} - -impl From<&PublicKey> for AccountHash { - fn from(public_key: &PublicKey) -> Self { - AccountHash::from_public_key(public_key, crypto::blake2b) - } -} - -impl Display for AccountHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for AccountHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "AccountHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for AccountHash { - fn cl_type() -> CLType { - CLType::ByteArray(ACCOUNT_HASH_LENGTH as u32) - } -} - -impl ToBytes for AccountHash { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> 
Result<(), Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for AccountHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((AccountHash::new(bytes), rem)) - } -} - -impl AsRef<[u8]> for AccountHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -/// Associated error type of `TryFrom<&[u8]>` for [`AccountHash`]. -#[derive(Debug)] -pub struct TryFromSliceForAccountHashError(()); - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> AccountHash { - AccountHash::new(rng.gen()) - } -} diff --git a/casper_types_ver_2_0/src/account/action_thresholds.rs b/casper_types_ver_2_0/src/account/action_thresholds.rs deleted file mode 100644 index ce2e492c..00000000 --- a/casper_types_ver_2_0/src/account/action_thresholds.rs +++ /dev/null @@ -1,175 +0,0 @@ -//! This module contains types and functions for managing action thresholds. - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::{ActionType, SetThresholdFailure, Weight}, - addressable_entity::WEIGHT_SERIALIZED_LENGTH, - bytesrepr::{self, Error, FromBytes, ToBytes}, -}; - -/// Thresholds that have to be met when executing an action of a certain type. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "AccountActionThresholds"))] -pub struct ActionThresholds { - /// Threshold for deploy execution. - pub deployment: Weight, - /// Threshold for managing action threshold. - pub key_management: Weight, -} - -impl ActionThresholds { - /// Creates new ActionThresholds object with provided weights - /// - /// Requires deployment threshold to be lower than or equal to - /// key management threshold. - pub fn new( - deployment: Weight, - key_management: Weight, - ) -> Result { - if deployment > key_management { - return Err(SetThresholdFailure::DeploymentThreshold); - } - Ok(ActionThresholds { - deployment, - key_management, - }) - } - /// Sets new threshold for [ActionType::Deployment]. - /// Should return an error if setting new threshold for `action_type` breaks - /// one of the invariants. Currently, invariant is that - /// `ActionType::Deployment` threshold shouldn't be higher than any - /// other, which should be checked both when increasing `Deployment` - /// threshold and decreasing the other. - pub fn set_deployment_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if new_threshold > self.key_management { - Err(SetThresholdFailure::DeploymentThreshold) - } else { - self.deployment = new_threshold; - Ok(()) - } - } - - /// Sets new threshold for [ActionType::KeyManagement]. - pub fn set_key_management_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if self.deployment > new_threshold { - Err(SetThresholdFailure::KeyManagementThreshold) - } else { - self.key_management = new_threshold; - Ok(()) - } - } - - /// Returns the deployment action threshold. - pub fn deployment(&self) -> &Weight { - &self.deployment - } - - /// Returns key management action threshold. 
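`ActionThresholds` maintains a single invariant, checked in `new` and in both setters: the deployment threshold may never exceed the key-management threshold (whose getter continues just below). A sketch of the expected behaviour, with paths assumed as above:

```rust
use casper_types_ver_2_0::account::{ActionThresholds, SetThresholdFailure, Weight};

fn main() {
    // deployment (1) <= key_management (3): accepted.
    let mut thresholds = ActionThresholds::new(Weight::new(1), Weight::new(3)).expect("ordered");

    // Raising deployment above key management is rejected...
    assert_eq!(
        thresholds.set_deployment_threshold(Weight::new(4)),
        Err(SetThresholdFailure::DeploymentThreshold)
    );

    // ...but raising key management first makes the same value legal.
    thresholds
        .set_key_management_threshold(Weight::new(4))
        .expect("key management can always be raised");
    thresholds
        .set_deployment_threshold(Weight::new(4))
        .expect("now within bounds");
}
```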
- pub fn key_management(&self) -> &Weight { - &self.key_management - } - - /// Unified function that takes an action type, and changes appropriate - /// threshold defined by the [ActionType] variants. - pub fn set_threshold( - &mut self, - action_type: ActionType, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - match action_type { - ActionType::Deployment => self.set_deployment_threshold(new_threshold), - ActionType::KeyManagement => self.set_key_management_threshold(new_threshold), - } - } -} - -impl Default for ActionThresholds { - fn default() -> Self { - ActionThresholds { - deployment: Weight::new(1), - key_management: Weight::new(1), - } - } -} - -impl ToBytes for ActionThresholds { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - result.append(&mut self.deployment.to_bytes()?); - result.append(&mut self.key_management.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - 2 * WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deployment().write_bytes(writer)?; - self.key_management().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ActionThresholds { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (deployment, rem) = Weight::from_bytes(bytes)?; - let (key_management, rem) = Weight::from_bytes(rem)?; - let ret = ActionThresholds { - deployment, - key_management, - }; - Ok((ret, rem)) - } -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use super::ActionThresholds; - - pub fn account_action_thresholds_arb() -> impl Strategy { - Just(Default::default()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn should_create_new_action_thresholds() { - let action_thresholds = ActionThresholds::new(Weight::new(1), Weight::new(42)).unwrap(); - assert_eq!(*action_thresholds.deployment(), Weight::new(1)); - assert_eq!(*action_thresholds.key_management(), Weight::new(42)); - } - - #[test] - fn should_not_create_action_thresholds_with_invalid_deployment_threshold() { - // deployment cant be greater than key management - assert!(ActionThresholds::new(Weight::new(5), Weight::new(1)).is_err()); - } - - #[test] - fn serialization_roundtrip() { - let action_thresholds = ActionThresholds::new(Weight::new(1), Weight::new(42)).unwrap(); - bytesrepr::test_serialization_roundtrip(&action_thresholds); - } -} diff --git a/casper_types_ver_2_0/src/account/action_type.rs b/casper_types_ver_2_0/src/account/action_type.rs deleted file mode 100644 index 65848f79..00000000 --- a/casper_types_ver_2_0/src/account/action_type.rs +++ /dev/null @@ -1,32 +0,0 @@ -use core::convert::TryFrom; - -use crate::addressable_entity::TryFromIntError; - -/// The various types of action which can be performed in the context of a given account. -#[repr(u32)] -pub enum ActionType { - /// Represents performing a deploy. - Deployment = 0, - /// Represents changing the associated keys (i.e. map of [`AccountHash`](super::AccountHash)s - /// to [`Weight`](super::Weight)s) or action thresholds (i.e. the total - /// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to - /// perform various actions). - KeyManagement = 1, -} - -// This conversion is not intended to be used by third party crates. 
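`set_threshold` is the single entry point that dispatches on `ActionType` to the matching setter, and the `TryFrom<u32>` conversion that follows below maps raw discriminants back onto the enum. For example, a sketch under the same path assumptions:

```rust
use casper_types_ver_2_0::account::{ActionThresholds, ActionType, Weight};

fn main() {
    // `Default` yields deployment = 1, key_management = 1.
    let mut thresholds = ActionThresholds::default();

    // Dispatches to `set_key_management_threshold` / `set_deployment_threshold` respectively.
    thresholds
        .set_threshold(ActionType::KeyManagement, Weight::new(2))
        .expect("raising key management is always allowed");
    thresholds
        .set_threshold(ActionType::Deployment, Weight::new(2))
        .expect("deployment may now match key management");

    assert_eq!(*thresholds.deployment(), Weight::new(2));
    assert_eq!(*thresholds.key_management(), Weight::new(2));
}
```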
-#[doc(hidden)] -impl TryFrom for ActionType { - type Error = TryFromIntError; - - fn try_from(value: u32) -> Result { - // This doesn't use `num_derive` traits such as FromPrimitive and ToPrimitive - // that helps to automatically create `from_u32` and `to_u32`. This approach - // gives better control over generated code. - match value { - d if d == ActionType::Deployment as u32 => Ok(ActionType::Deployment), - d if d == ActionType::KeyManagement as u32 => Ok(ActionType::KeyManagement), - _ => Err(TryFromIntError(())), - } - } -} diff --git a/casper_types_ver_2_0/src/account/associated_keys.rs b/casper_types_ver_2_0/src/account/associated_keys.rs deleted file mode 100644 index aa7d3e91..00000000 --- a/casper_types_ver_2_0/src/account/associated_keys.rs +++ /dev/null @@ -1,381 +0,0 @@ -//! This module contains types and functions for working with keys associated with an account. - -use alloc::{ - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - vec::Vec, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -use crate::{ - account::{AccountHash, Weight}, - addressable_entity::{AddKeyFailure, RemoveKeyFailure, UpdateKeyFailure}, - bytesrepr::{self, FromBytes, ToBytes}, -}; - -/// A collection of weighted public keys (represented as account hashes) associated with an account. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "AccountAssociatedKeys"))] -#[serde(deny_unknown_fields)] -#[rustfmt::skip] -pub struct AssociatedKeys( - #[serde(with = "BTreeMapToArray::")] - BTreeMap, -); - -impl AssociatedKeys { - /// Constructs a new AssociatedKeys. - pub fn new(key: AccountHash, weight: Weight) -> AssociatedKeys { - let mut bt: BTreeMap = BTreeMap::new(); - bt.insert(key, weight); - AssociatedKeys(bt) - } - - /// Adds a new AssociatedKey to the set. - /// - /// Returns true if added successfully, false otherwise. - pub fn add_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), AddKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(entry) => { - entry.insert(weight); - } - Entry::Occupied(_) => return Err(AddKeyFailure::DuplicateKey), - } - Ok(()) - } - - /// Removes key from the associated keys set. - /// Returns true if value was found in the set prior to the removal, false - /// otherwise. - pub fn remove_key(&mut self, key: &AccountHash) -> Result<(), RemoveKeyFailure> { - self.0 - .remove(key) - .map(|_| ()) - .ok_or(RemoveKeyFailure::MissingKey) - } - - /// Adds new AssociatedKey to the set. - /// Returns true if added successfully, false otherwise. - pub fn update_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), UpdateKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(_) => { - return Err(UpdateKeyFailure::MissingKey); - } - Entry::Occupied(mut entry) => { - *entry.get_mut() = weight; - } - } - Ok(()) - } - - /// Returns the weight of an account hash. - pub fn get(&self, key: &AccountHash) -> Option<&Weight> { - self.0.get(key) - } - - /// Returns `true` if a given key exists. 
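`AssociatedKeys` is a thin wrapper over a `BTreeMap` from account hash to weight, so `add_key` rejects duplicates and `update_key` rejects absent keys; the lookup helpers continue below. A small sketch, with error-type paths assumed from the imports in this file:

```rust
use casper_types_ver_2_0::{
    account::{AccountHash, AssociatedKeys, Weight},
    addressable_entity::{AddKeyFailure, UpdateKeyFailure},
};

fn main() {
    let identity = AccountHash::new([1u8; 32]);
    let other = AccountHash::new([2u8; 32]);

    let mut keys = AssociatedKeys::new(identity, Weight::new(1));

    // Re-adding an existing hash is a duplicate; updating an absent hash is a missing key.
    assert_eq!(
        keys.add_key(identity, Weight::new(9)),
        Err(AddKeyFailure::DuplicateKey)
    );
    assert_eq!(
        keys.update_key(other, Weight::new(2)),
        Err(UpdateKeyFailure::MissingKey)
    );

    // Happy path: add, then look the weight up.
    keys.add_key(other, Weight::new(2)).expect("new key");
    assert_eq!(keys.get(&other), Some(&Weight::new(2)));
}
```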
- pub fn contains_key(&self, key: &AccountHash) -> bool { - self.0.contains_key(key) - } - - /// Returns an iterator over the account hash and the weights. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns the count of the associated keys. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if the associated keys are empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Helper method that calculates weight for keys that comes from any - /// source. - /// - /// This method is not concerned about uniqueness of the passed iterable. - /// Uniqueness is determined based on the input collection properties, - /// which is either BTreeSet (in [`AssociatedKeys::calculate_keys_weight`]) - /// or BTreeMap (in [`AssociatedKeys::total_keys_weight`]). - fn calculate_any_keys_weight<'a>(&self, keys: impl Iterator) -> Weight { - let total = keys - .filter_map(|key| self.0.get(key)) - .fold(0u8, |acc, w| acc.saturating_add(w.value())); - - Weight::new(total) - } - - /// Calculates total weight of authorization keys provided by an argument - pub fn calculate_keys_weight(&self, authorization_keys: &BTreeSet) -> Weight { - self.calculate_any_keys_weight(authorization_keys.iter()) - } - - /// Calculates total weight of all authorization keys - pub fn total_keys_weight(&self) -> Weight { - self.calculate_any_keys_weight(self.0.keys()) - } - - /// Calculates total weight of all authorization keys excluding a given key - pub fn total_keys_weight_excluding(&self, account_hash: AccountHash) -> Weight { - self.calculate_any_keys_weight(self.0.keys().filter(|&&element| element != account_hash)) - } -} - -impl From> for AssociatedKeys { - fn from(associated_keys: BTreeMap) -> Self { - Self(associated_keys) - } -} - -impl From for BTreeMap { - fn from(associated_keys: AssociatedKeys) -> Self { - associated_keys.0 - } -} - -impl ToBytes for AssociatedKeys { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for AssociatedKeys { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (associated_keys, rem) = FromBytes::from_bytes(bytes)?; - Ok((AssociatedKeys(associated_keys), rem)) - } -} - -struct Labels; - -impl KeyValueLabels for Labels { - const KEY: &'static str = "account_hash"; - const VALUE: &'static str = "weight"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for Labels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("AssociatedKey"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some("A weighted public key."); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = - Some("The account hash of the public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = - Some("The weight assigned to the public key."); -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use crate::gens::{account_hash_arb, account_weight_arb}; - - use super::AssociatedKeys; - - pub fn account_associated_keys_arb() -> impl Strategy { - proptest::collection::btree_map(account_hash_arb(), account_weight_arb(), 10).prop_map( - |keys| { - let mut associated_keys = AssociatedKeys::default(); - keys.into_iter().for_each(|(k, v)| { - associated_keys.add_key(k, v).unwrap(); - }); - associated_keys 
- }, - ) - } -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeSet, iter::FromIterator}; - - use crate::{ - account::{AccountHash, Weight, ACCOUNT_HASH_LENGTH}, - bytesrepr, - }; - - use super::*; - - #[test] - fn associated_keys_add() { - let mut keys = - AssociatedKeys::new(AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]), Weight::new(1)); - let new_pk = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let new_pk_weight = Weight::new(2); - assert!(keys.add_key(new_pk, new_pk_weight).is_ok()); - assert_eq!(keys.get(&new_pk), Some(&new_pk_weight)) - } - - #[test] - fn associated_keys_add_duplicate() { - let pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert_eq!( - keys.add_key(pk, Weight::new(10)), - Err(AddKeyFailure::DuplicateKey) - ); - assert_eq!(keys.get(&pk), Some(&weight)); - } - - #[test] - fn associated_keys_remove() { - let pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert!(keys.remove_key(&pk).is_ok()); - assert!(keys - .remove_key(&AccountHash::new([1u8; ACCOUNT_HASH_LENGTH])) - .is_err()); - } - - #[test] - fn associated_keys_update() { - let pk1 = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let pk2 = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk1, weight); - assert!(matches!( - keys.update_key(pk2, Weight::new(2)) - .expect_err("should get error"), - UpdateKeyFailure::MissingKey - )); - keys.add_key(pk2, Weight::new(1)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(1))); - keys.update_key(pk2, Weight::new(2)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(2))); - } - - #[test] - fn associated_keys_calculate_keys_once() { - let key_1 = AccountHash::new([0; 32]); - let key_2 = AccountHash::new([1; 32]); - let key_3 = AccountHash::new([2; 32]); - let mut keys = AssociatedKeys::default(); - - keys.add_key(key_2, Weight::new(2)) - .expect("should add key_1"); - keys.add_key(key_1, Weight::new(1)) - .expect("should add key_1"); - keys.add_key(key_3, Weight::new(3)) - .expect("should add key_1"); - - assert_eq!( - keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - key_1, key_2, key_3, key_1, key_2, key_3, - ])), - Weight::new(1 + 2 + 3) - ); - } - - #[test] - fn associated_keys_total_weight() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(12)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(13)) - .expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight(), - Weight::new(1 + 11 + 12 + 13) - ); - } - - #[test] - fn associated_keys_total_weight_excluding() { - let identity_key = AccountHash::new([1u8; 32]); - let identity_key_weight = Weight::new(1); - - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(11); - - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(12); - - let key_3 = AccountHash::new([4u8; 32]); - let key_3_weight = Weight::new(13); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - res.add_key(key_1, key_1_weight).expect("should add key 1"); - res.add_key(key_2, key_2_weight).expect("should add key 2"); - 
res.add_key(key_3, key_3_weight).expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight_excluding(key_2), - Weight::new(identity_key_weight.value() + key_1_weight.value() + key_3_weight.value()) - ); - } - - #[test] - fn overflowing_keys_weight() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - - let identity_key_weight = Weight::new(250); - let weight_1 = Weight::new(1); - let weight_2 = Weight::new(2); - let weight_3 = Weight::new(3); - - let saturated_weight = Weight::new(u8::max_value()); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - - res.add_key(key_1, weight_1).expect("should add key 1"); - res.add_key(key_2, weight_2).expect("should add key 2"); - res.add_key(key_3, weight_3).expect("should add key 3"); - res - }; - - assert_eq!( - associated_keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - identity_key, // 250 - key_1, // 251 - key_2, // 253 - key_3, // 256 - error - ])), - saturated_weight, - ); - } - - #[test] - fn serialization_roundtrip() { - let mut keys = AssociatedKeys::default(); - keys.add_key(AccountHash::new([1; 32]), Weight::new(1)) - .unwrap(); - keys.add_key(AccountHash::new([2; 32]), Weight::new(2)) - .unwrap(); - keys.add_key(AccountHash::new([3; 32]), Weight::new(3)) - .unwrap(); - bytesrepr::test_serialization_roundtrip(&keys); - } -} diff --git a/casper_types_ver_2_0/src/account/error.rs b/casper_types_ver_2_0/src/account/error.rs deleted file mode 100644 index 35195fc7..00000000 --- a/casper_types_ver_2_0/src/account/error.rs +++ /dev/null @@ -1,43 +0,0 @@ -use core::{ - array::TryFromSliceError, - fmt::{self, Display, Formatter}, -}; - -/// Error returned when decoding an `AccountHash` from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// The prefix is invalid. - InvalidPrefix, - /// The hash is not valid hex. - Hex(base16::DecodeError), - /// The hash is the wrong length. - Hash(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'account-hash-'"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Hash(error) => write!(f, "address portion is wrong length: {}", error), - } - } -} -/// Associated error type of `TryFrom<&[u8]>` for [`AccountHash`](super::AccountHash). -#[derive(Debug)] -pub struct TryFromSliceForAccountHashError(()); diff --git a/casper_types_ver_2_0/src/account/weight.rs b/casper_types_ver_2_0/src/account/weight.rs deleted file mode 100644 index f9c87035..00000000 --- a/casper_types_ver_2_0/src/account/weight.rs +++ /dev/null @@ -1,69 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// The number of bytes in a serialized [`Weight`]. 
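The `overflowing_keys_weight` test above relies on key weights being folded with `u8::saturating_add`, so an over-committed key set clamps at 255 instead of wrapping; the single-byte `Weight` newtype defined next is what makes `u8` the carrier. A standalone sketch of that arithmetic (not the crate's code, only the fold it performs):

```rust
/// Mirrors the saturating fold used by `calculate_any_keys_weight`.
fn total_weight(weights: &[u8]) -> u8 {
    weights.iter().fold(0u8, |acc, w| acc.saturating_add(*w))
}

fn main() {
    // 250 + 1 + 2 = 253 is exact...
    assert_eq!(total_weight(&[250, 1, 2]), 253);
    // ...but 250 + 1 + 2 + 3 = 256 clamps to u8::MAX rather than wrapping to 0.
    assert_eq!(total_weight(&[250, 1, 2, 3]), u8::MAX);
}
```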
-pub const WEIGHT_SERIALIZED_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// The weight associated with public keys in an account's associated keys. -#[derive(PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr( - feature = "json-schema", - schemars(rename = "AccountAssociatedKeyWeight") -)] -pub struct Weight(u8); - -impl Weight { - /// Maximum possible weight. - pub const MAX: Weight = Weight(u8::MAX); - - /// Constructs a new `Weight`. - pub const fn new(weight: u8) -> Weight { - Weight(weight) - } - - /// Returns the value of `self` as a `u8`. - pub fn value(self) -> u8 { - self.0 - } -} - -impl ToBytes for Weight { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.0); - Ok(()) - } -} - -impl FromBytes for Weight { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (byte, rem) = u8::from_bytes(bytes)?; - Ok((Weight::new(byte), rem)) - } -} - -impl CLTyped for Weight { - fn cl_type() -> CLType { - CLType::U8 - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity.rs b/casper_types_ver_2_0/src/addressable_entity.rs deleted file mode 100644 index 11f69c4c..00000000 --- a/casper_types_ver_2_0/src/addressable_entity.rs +++ /dev/null @@ -1,1714 +0,0 @@ -//! Data types for supporting contract headers feature. -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -pub mod action_thresholds; -mod action_type; -pub mod associated_keys; -mod error; -mod named_keys; -mod weight; - -use alloc::{ - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - iter, -}; -use num_derive::FromPrimitive; -use num_traits::FromPrimitive; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -pub use self::{ - action_thresholds::ActionThresholds, - action_type::ActionType, - associated_keys::AssociatedKeys, - error::{ - FromAccountHashStrError, SetThresholdFailure, TryFromIntError, - TryFromSliceForAccountHashError, - }, - named_keys::NamedKeys, - weight::{Weight, WEIGHT_SERIALIZED_LENGTH}, -}; - -use crate::{ - account::{Account, AccountHash}, - byte_code::ByteCodeHash, - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, - contract_messages::TopicNameHash, - contracts::{Contract, ContractHash}, - key::ByteCodeAddr, - uref::{self, URef}, - AccessRights, ApiError, CLType, CLTyped, ContextAccessRights, Group, HashAddr, Key, - PackageHash, ProtocolVersion, KEY_HASH_LENGTH, -}; - -/// Maximum number of distinct user groups. -pub const MAX_GROUPS: u8 = 10; -/// Maximum number of URefs which can be assigned across all user groups. -pub const MAX_TOTAL_UREFS: usize = 100; - -/// The tag for Contract Packages associated with Wasm stored on chain. 
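`Weight` serializes as exactly one byte (`WEIGHT_SERIALIZED_LENGTH`), which is why all the threshold arithmetic above happens in `u8`; the package-kind tags for `addressable_entity` continue below. A round-trip sketch under the same path assumptions:

```rust
use casper_types_ver_2_0::{
    account::Weight,
    bytesrepr::{FromBytes, ToBytes},
};

fn main() {
    let weight = Weight::new(3);

    // A `Weight` is a single byte on the wire.
    let bytes = weight.to_bytes().expect("should serialize");
    assert_eq!(bytes, vec![3u8]);
    assert_eq!(weight.serialized_length(), 1);

    let (parsed, remainder) = Weight::from_bytes(&bytes).expect("should deserialize");
    assert_eq!(parsed, weight);
    assert!(remainder.is_empty());
}
```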
-pub const PACKAGE_KIND_WASM_TAG: u8 = 0; -/// The tag for Contract Package associated with a native contract implementation. -pub const PACKAGE_KIND_SYSTEM_CONTRACT_TAG: u8 = 1; -/// The tag for Contract Package associated with an Account hash. -pub const PACKAGE_KIND_ACCOUNT_TAG: u8 = 2; -/// The tag for Contract Packages associated with legacy packages. -pub const PACKAGE_KIND_LEGACY_TAG: u8 = 3; - -const ADDRESSABLE_ENTITY_STRING_PREFIX: &str = "addressable-entity-"; - -/// Set of errors which may happen when working with contract headers. -#[derive(Debug, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Attempt to override an existing or previously existing version with a - /// new header (this is not allowed to ensure immutability of a given - /// version). - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(1, Error::PreviouslyUsedVersion as u8); - /// ``` - PreviouslyUsedVersion = 1, - /// Attempted to disable a contract that does not exist. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(2, Error::EntityNotFound as u8); - /// ``` - EntityNotFound = 2, - /// Attempted to create a user group which already exists (use the update - /// function to change an existing user group). - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(3, Error::GroupAlreadyExists as u8); - /// ``` - GroupAlreadyExists = 3, - /// Attempted to add a new user group which exceeds the allowed maximum - /// number of groups. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(4, Error::MaxGroupsExceeded as u8); - /// ``` - MaxGroupsExceeded = 4, - /// Attempted to add a new URef to a group, which resulted in the total - /// number of URefs across all user groups to exceed the allowed maximum. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(5, Error::MaxTotalURefsExceeded as u8); - /// ``` - MaxTotalURefsExceeded = 5, - /// Attempted to remove a URef from a group, which does not exist in the - /// group. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(6, Error::GroupDoesNotExist as u8); - /// ``` - GroupDoesNotExist = 6, - /// Attempted to remove unknown URef from the group. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(7, Error::UnableToRemoveURef as u8); - /// ``` - UnableToRemoveURef = 7, - /// Group is use by at least one active contract. - /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(8, Error::GroupInUse as u8); - /// ``` - GroupInUse = 8, - /// URef already exists in given group. 
- /// ``` - /// # use casper_types_ver_2_0::addressable_entity::Error; - /// assert_eq!(9, Error::URefAlreadyExists as u8); - /// ``` - URefAlreadyExists = 9, -} - -impl TryFrom for Error { - type Error = (); - - fn try_from(value: u8) -> Result { - let error = match value { - v if v == Self::PreviouslyUsedVersion as u8 => Self::PreviouslyUsedVersion, - v if v == Self::EntityNotFound as u8 => Self::EntityNotFound, - v if v == Self::GroupAlreadyExists as u8 => Self::GroupAlreadyExists, - v if v == Self::MaxGroupsExceeded as u8 => Self::MaxGroupsExceeded, - v if v == Self::MaxTotalURefsExceeded as u8 => Self::MaxTotalURefsExceeded, - v if v == Self::GroupDoesNotExist as u8 => Self::GroupDoesNotExist, - v if v == Self::UnableToRemoveURef as u8 => Self::UnableToRemoveURef, - v if v == Self::GroupInUse as u8 => Self::GroupInUse, - v if v == Self::URefAlreadyExists as u8 => Self::URefAlreadyExists, - _ => return Err(()), - }; - Ok(error) - } -} - -/// Associated error type of `TryFrom<&[u8]>` for `ContractHash`. -#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -impl Display for TryFromSliceForContractHashError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "failed to retrieve from slice") - } -} - -/// An error from parsing a formatted contract string -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Invalid formatted string prefix. - InvalidPrefix, - /// Error when decoding a hex string - Hex(base16::DecodeError), - /// Error when parsing an account - Account(TryFromSliceForAccountHashError), - /// Error when parsing the hash. - Hash(TryFromSliceError), - /// Error when parsing an uref. - URef(uref::FromStrError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - FromStrError::Account(error) => { - write!(f, "account hash from string error: {:?}", error) - } - } - } -} - -/// A newtype wrapping a `HashAddr` which references an [`AddressableEntity`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "The hex-encoded address of the addressable entity.") -)] -pub struct AddressableEntityHash( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] HashAddr, -); - -impl AddressableEntityHash { - /// Constructs a new `AddressableEntityHash` from the raw bytes of the contract hash. - pub const fn new(value: HashAddr) -> AddressableEntityHash { - AddressableEntityHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. 
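Each `addressable_entity::Error` variant doubles as its stable `u8` code (the doc tests above pin the numbering), and the `TryFrom<u8>` impl maps a known code back onto the variant; the `AddressableEntityHash` accessors continue below. A sketch reusing the doc tests' import path:

```rust
use core::convert::TryFrom;

use casper_types_ver_2_0::addressable_entity::Error;

fn main() {
    // The discriminant is the stable error code...
    assert_eq!(Error::MaxGroupsExceeded as u8, 4);

    // ...and `TryFrom<u8>` recovers the variant from a known code.
    assert_eq!(Error::try_from(4u8), Ok(Error::MaxGroupsExceeded));

    // Unknown codes are rejected (the associated error type is the unit `()`).
    assert!(Error::try_from(200u8).is_err());
}
```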
- pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `AddressableEntityHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - ADDRESSABLE_ENTITY_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `AddressableEntityHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(ADDRESSABLE_ENTITY_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = HashAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(AddressableEntityHash(bytes)) - } -} - -impl From for AddressableEntityHash { - fn from(contract_hash: ContractHash) -> Self { - AddressableEntityHash::new(contract_hash.value()) - } -} - -impl Display for AddressableEntityHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for AddressableEntityHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!( - f, - "AddressableEntityHash({})", - base16::encode_lower(&self.0) - ) - } -} - -impl CLTyped for AddressableEntityHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for AddressableEntityHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for AddressableEntityHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((AddressableEntityHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for AddressableEntityHash { - fn from(bytes: [u8; 32]) -> Self { - AddressableEntityHash(bytes) - } -} - -impl TryFrom for AddressableEntityHash { - type Error = ApiError; - - fn try_from(value: Key) -> Result { - if let Key::AddressableEntity(_, entity_addr) = value { - Ok(AddressableEntityHash::new(entity_addr)) - } else { - Err(ApiError::Formatting) - } - } -} - -impl Serialize for AddressableEntityHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for AddressableEntityHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - AddressableEntityHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(AddressableEntityHash(bytes)) - } - } -} - -impl AsRef<[u8]> for AddressableEntityHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for AddressableEntityHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(AddressableEntityHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for AddressableEntityHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(AddressableEntityHash::new) - .map_err(|_| 
TryFromSliceForContractHashError(())) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> AddressableEntityHash { - AddressableEntityHash(rng.gen()) - } -} - -/// Errors that can occur while adding a new [`AccountHash`] to an account's associated keys map. -#[derive(PartialEq, Eq, Debug, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum AddKeyFailure { - /// There are already maximum [`AccountHash`]s associated with the given account. - MaxKeysLimit = 1, - /// The given [`AccountHash`] is already associated with the given account. - DuplicateKey = 2, - /// Caller doesn't have sufficient permissions to associate a new [`AccountHash`] with the - /// given account. - PermissionDenied = 3, -} - -impl Display for AddKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - AddKeyFailure::MaxKeysLimit => formatter.write_str( - "Unable to add new associated key because maximum amount of keys is reached", - ), - AddKeyFailure::DuplicateKey => formatter - .write_str("Unable to add new associated key because given key already exists"), - AddKeyFailure::PermissionDenied => formatter - .write_str("Unable to add new associated key due to insufficient permissions"), - } - } -} - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for AddKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == AddKeyFailure::MaxKeysLimit as i32 => Ok(AddKeyFailure::MaxKeysLimit), - d if d == AddKeyFailure::DuplicateKey as i32 => Ok(AddKeyFailure::DuplicateKey), - d if d == AddKeyFailure::PermissionDenied as i32 => Ok(AddKeyFailure::PermissionDenied), - _ => Err(TryFromIntError(())), - } - } -} - -/// Errors that can occur while removing a [`AccountHash`] from an account's associated keys map. -#[derive(Debug, Eq, PartialEq, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum RemoveKeyFailure { - /// The given [`AccountHash`] is not associated with the given account. - MissingKey = 1, - /// Caller doesn't have sufficient permissions to remove an associated [`AccountHash`] from the - /// given account. - PermissionDenied = 2, - /// Removing the given associated [`AccountHash`] would cause the total weight of all remaining - /// `AccountHash`s to fall below one of the action thresholds for the given account. - ThresholdViolation = 3, -} - -impl Display for RemoveKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - RemoveKeyFailure::MissingKey => { - formatter.write_str("Unable to remove a key that does not exist") - } - RemoveKeyFailure::PermissionDenied => formatter - .write_str("Unable to remove associated key due to insufficient permissions"), - RemoveKeyFailure::ThresholdViolation => formatter.write_str( - "Unable to remove a key which would violate action threshold constraints", - ), - } - } -} - -// This conversion is not intended to be used by third party crates. 
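A usage sketch of the formatted-string round trip defined above; the vendored crate name and a root re-export of `AddressableEntityHash` are assumptions, while the method calls follow the impls shown in this diff:

    use casper_types_ver_2_0::AddressableEntityHash;

    fn main() {
        let hash = AddressableEntityHash::new([3u8; 32]);

        // Human-readable form, as also emitted by the human-readable serde path.
        let formatted = hash.to_formatted_string();
        assert!(formatted.starts_with("addressable-entity-"));

        // Parses back through the checksummed-hex decoder.
        let parsed = AddressableEntityHash::from_formatted_str(&formatted).unwrap();
        assert_eq!(hash, parsed);
    }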
-#[doc(hidden)] -impl TryFrom for RemoveKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == RemoveKeyFailure::MissingKey as i32 => Ok(RemoveKeyFailure::MissingKey), - d if d == RemoveKeyFailure::PermissionDenied as i32 => { - Ok(RemoveKeyFailure::PermissionDenied) - } - d if d == RemoveKeyFailure::ThresholdViolation as i32 => { - Ok(RemoveKeyFailure::ThresholdViolation) - } - _ => Err(TryFromIntError(())), - } - } -} - -/// Errors that can occur while updating the [`Weight`] of a [`AccountHash`] in an account's -/// associated keys map. -#[derive(PartialEq, Eq, Debug, Copy, Clone)] -#[repr(i32)] -#[non_exhaustive] -pub enum UpdateKeyFailure { - /// The given [`AccountHash`] is not associated with the given account. - MissingKey = 1, - /// Caller doesn't have sufficient permissions to update an associated [`AccountHash`] from the - /// given account. - PermissionDenied = 2, - /// Updating the [`Weight`] of the given associated [`AccountHash`] would cause the total - /// weight of all `AccountHash`s to fall below one of the action thresholds for the given - /// account. - ThresholdViolation = 3, -} - -impl Display for UpdateKeyFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - UpdateKeyFailure::MissingKey => formatter.write_str( - "Unable to update the value under an associated key that does not exist", - ), - UpdateKeyFailure::PermissionDenied => formatter - .write_str("Unable to update associated key due to insufficient permissions"), - UpdateKeyFailure::ThresholdViolation => formatter.write_str( - "Unable to update weight that would fall below any of action thresholds", - ), - } - } -} - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for UpdateKeyFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == UpdateKeyFailure::MissingKey as i32 => Ok(UpdateKeyFailure::MissingKey), - d if d == UpdateKeyFailure::PermissionDenied as i32 => { - Ok(UpdateKeyFailure::PermissionDenied) - } - d if d == UpdateKeyFailure::ThresholdViolation as i32 => { - Ok(UpdateKeyFailure::ThresholdViolation) - } - _ => Err(TryFromIntError(())), - } - } -} - -/// Collection of named entry points. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(transparent, deny_unknown_fields)] -pub struct EntryPoints( - #[serde(with = "BTreeMapToArray::")] - BTreeMap, -); - -impl ToBytes for EntryPoints { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for EntryPoints { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (entry_points_map, remainder) = BTreeMap::::from_bytes(bytes)?; - Ok((EntryPoints(entry_points_map), remainder)) - } -} - -impl Default for EntryPoints { - fn default() -> Self { - let mut entry_points = EntryPoints::new(); - let entry_point = EntryPoint::default(); - entry_points.add_entry_point(entry_point); - entry_points - } -} - -impl EntryPoints { - /// Constructs a new, empty `EntryPoints`. 
- pub const fn new() -> EntryPoints { - EntryPoints(BTreeMap::::new()) - } - - /// Constructs a new `EntryPoints` with a single entry for the default `EntryPoint`. - pub fn new_with_default_entry_point() -> Self { - let mut entry_points = EntryPoints::new(); - let entry_point = EntryPoint::default(); - entry_points.add_entry_point(entry_point); - entry_points - } - - /// Adds new [`EntryPoint`]. - pub fn add_entry_point(&mut self, entry_point: EntryPoint) { - self.0.insert(entry_point.name().to_string(), entry_point); - } - - /// Checks if given [`EntryPoint`] exists. - pub fn has_entry_point(&self, entry_point_name: &str) -> bool { - self.0.contains_key(entry_point_name) - } - - /// Gets an existing [`EntryPoint`] by its name. - pub fn get(&self, entry_point_name: &str) -> Option<&EntryPoint> { - self.0.get(entry_point_name) - } - - /// Returns iterator for existing entry point names. - pub fn keys(&self) -> impl Iterator { - self.0.keys() - } - - /// Takes all entry points. - pub fn take_entry_points(self) -> Vec { - self.0.into_values().collect() - } - - /// Returns the length of the entry points - pub fn len(&self) -> usize { - self.0.len() - } - - /// Checks if the `EntryPoints` is empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Checks if any of the entry points are of the type Session. - pub fn contains_stored_session(&self) -> bool { - self.0 - .values() - .any(|entry_point| entry_point.entry_point_type == EntryPointType::Session) - } -} - -impl From> for EntryPoints { - fn from(entry_points: Vec) -> EntryPoints { - let entries = entry_points - .into_iter() - .map(|entry_point| (String::from(entry_point.name()), entry_point)) - .collect(); - EntryPoints(entries) - } -} - -struct EntryPointLabels; - -impl KeyValueLabels for EntryPointLabels { - const KEY: &'static str = "name"; - const VALUE: &'static str = "entry_point"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for EntryPointLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("NamedEntryPoint"); -} - -/// Collection of named message topics. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(transparent, deny_unknown_fields)] -pub struct MessageTopics( - #[serde(with = "BTreeMapToArray::")] - BTreeMap, -); - -impl ToBytes for MessageTopics { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for MessageTopics { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (message_topics_map, remainder) = BTreeMap::::from_bytes(bytes)?; - Ok((MessageTopics(message_topics_map), remainder)) - } -} - -impl MessageTopics { - /// Adds new message topic by topic name. - pub fn add_topic( - &mut self, - topic_name: &str, - topic_name_hash: TopicNameHash, - ) -> Result<(), MessageTopicError> { - if self.0.len() >= u32::MAX as usize { - return Err(MessageTopicError::MaxTopicsExceeded); - } - - match self.0.entry(topic_name.to_string()) { - Entry::Vacant(entry) => { - entry.insert(topic_name_hash); - Ok(()) - } - Entry::Occupied(_) => Err(MessageTopicError::DuplicateTopic), - } - } - - /// Checks if given topic name exists. 
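A usage sketch for the `EntryPoints` collection; the vendored crate name and root re-exports of the entry-point types are assumptions, while the calls mirror the methods defined above:

    use casper_types_ver_2_0::{
        CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter,
    };

    fn main() {
        let transfer = EntryPoint::new(
            "transfer",
            vec![Parameter::new("amount", CLType::U512)],
            CLType::Unit,
            EntryPointAccess::Public,
            EntryPointType::AddressableEntity,
        );

        let mut entry_points = EntryPoints::new();
        entry_points.add_entry_point(transfer);

        assert!(entry_points.has_entry_point("transfer"));
        assert_eq!(entry_points.len(), 1);
        // No `Session`-typed entry point was added.
        assert!(!entry_points.contains_stored_session());
    }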
- pub fn has_topic(&self, topic_name: &str) -> bool { - self.0.contains_key(topic_name) - } - - /// Gets the topic hash from the collection by its topic name. - pub fn get(&self, topic_name: &str) -> Option<&TopicNameHash> { - self.0.get(topic_name) - } - - /// Returns the length of the message topics. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns true if no message topics are registered. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Returns an iterator over the topic name and its hash. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } -} - -struct MessageTopicLabels; - -impl KeyValueLabels for MessageTopicLabels { - const KEY: &'static str = "topic_name"; - const VALUE: &'static str = "topic_name_hash"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for MessageTopicLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("MessageTopic"); -} - -impl From> for MessageTopics { - fn from(topics: BTreeMap) -> MessageTopics { - MessageTopics(topics) - } -} - -/// Errors that can occur while adding a new topic. -#[derive(PartialEq, Eq, Debug, Clone)] -#[non_exhaustive] -pub enum MessageTopicError { - /// Topic already exists. - DuplicateTopic, - /// Maximum number of topics exceeded. - MaxTopicsExceeded, - /// Topic name size exceeded. - TopicNameSizeExceeded, -} - -/// Methods and type signatures supported by a contract. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct AddressableEntity { - package_hash: PackageHash, - byte_code_hash: ByteCodeHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, - message_topics: MessageTopics, -} - -impl From - for ( - PackageHash, - ByteCodeHash, - NamedKeys, - EntryPoints, - ProtocolVersion, - URef, - AssociatedKeys, - ActionThresholds, - ) -{ - fn from(entity: AddressableEntity) -> Self { - ( - entity.package_hash, - entity.byte_code_hash, - entity.named_keys, - entity.entry_points, - entity.protocol_version, - entity.main_purse, - entity.associated_keys, - entity.action_thresholds, - ) - } -} - -impl AddressableEntity { - /// `AddressableEntity` constructor. - #[allow(clippy::too_many_arguments)] - pub fn new( - package_hash: PackageHash, - byte_code_hash: ByteCodeHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, - main_purse: URef, - associated_keys: AssociatedKeys, - action_thresholds: ActionThresholds, - message_topics: MessageTopics, - ) -> Self { - AddressableEntity { - package_hash, - byte_code_hash, - named_keys, - entry_points, - protocol_version, - main_purse, - action_thresholds, - associated_keys, - message_topics, - } - } - - /// Hash for accessing contract package - pub fn package_hash(&self) -> PackageHash { - self.package_hash - } - - /// Hash for accessing contract WASM - pub fn byte_code_hash(&self) -> ByteCodeHash { - self.byte_code_hash - } - - /// Checks whether there is a method with the given name - pub fn has_entry_point(&self, name: &str) -> bool { - self.entry_points.has_entry_point(name) - } - - /// Returns the type signature for the given `method`. - pub fn entry_point(&self, method: &str) -> Option<&EntryPoint> { - self.entry_points.get(method) - } - - /// Get the protocol version this header is targeting. 
- pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Returns main purse. - pub fn main_purse(&self) -> URef { - self.main_purse - } - - /// Returns an [`AccessRights::ADD`]-only version of the main purse's [`URef`]. - pub fn main_purse_add_only(&self) -> URef { - URef::new(self.main_purse.addr(), AccessRights::ADD) - } - - /// Returns associated keys. - pub fn associated_keys(&self) -> &AssociatedKeys { - &self.associated_keys - } - - /// Returns action thresholds. - pub fn action_thresholds(&self) -> &ActionThresholds { - &self.action_thresholds - } - - /// Adds an associated key to an addressable entity. - pub fn add_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), AddKeyFailure> { - self.associated_keys.add_key(account_hash, weight) - } - - /// Checks if removing given key would properly satisfy thresholds. - fn can_remove_key(&self, account_hash: AccountHash) -> bool { - let total_weight_without = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Returns true if the total weight calculated without given public key would be greater or - // equal to all of the thresholds. - total_weight_without >= *self.action_thresholds().deployment() - && total_weight_without >= *self.action_thresholds().key_management() - } - - /// Checks if adding a weight to a sum of all weights excluding the given key would make the - /// resulting value to fall below any of the thresholds on account. - fn can_update_key(&self, account_hash: AccountHash, weight: Weight) -> bool { - // Calculates total weight of all keys excluding the given key - let total_weight = self - .associated_keys - .total_keys_weight_excluding(account_hash); - - // Safely calculate new weight by adding the updated weight - let new_weight = total_weight.value().saturating_add(weight.value()); - - // Returns true if the new weight would be greater or equal to all of - // the thresholds. - new_weight >= self.action_thresholds().deployment().value() - && new_weight >= self.action_thresholds().key_management().value() - } - - /// Removes an associated key from an addressable entity. - /// - /// Verifies that removing the key will not cause the remaining weight to fall below any action - /// thresholds. - pub fn remove_associated_key( - &mut self, - account_hash: AccountHash, - ) -> Result<(), RemoveKeyFailure> { - if self.associated_keys.contains_key(&account_hash) { - // Check if removing this weight would fall below thresholds - if !self.can_remove_key(account_hash) { - return Err(RemoveKeyFailure::ThresholdViolation); - } - } - self.associated_keys.remove_key(&account_hash) - } - - /// Updates an associated key. - /// - /// Returns an error if the update would result in a violation of the key management thresholds. - pub fn update_associated_key( - &mut self, - account_hash: AccountHash, - weight: Weight, - ) -> Result<(), UpdateKeyFailure> { - if let Some(current_weight) = self.associated_keys.get(&account_hash) { - if weight < *current_weight { - // New weight is smaller than current weight - if !self.can_update_key(account_hash, weight) { - return Err(UpdateKeyFailure::ThresholdViolation); - } - } - } - self.associated_keys.update_key(account_hash, weight) - } - - /// Sets new action threshold for a given action type for the addressable entity. - /// - /// Returns an error if the new action threshold weight is greater than the total weight of the - /// account's associated keys. 
- pub fn set_action_threshold( - &mut self, - action_type: ActionType, - weight: Weight, - ) -> Result<(), SetThresholdFailure> { - // Verify if new threshold weight exceeds total weight of all associated - // keys. - self.can_set_threshold(weight)?; - // Set new weight for given action - self.action_thresholds.set_threshold(action_type, weight) - } - - /// Sets a new action threshold for a given action type for the account without checking against - /// the total weight of the associated keys. - /// - /// This should only be called when authorized by an administrator account. - /// - /// Returns an error if setting the action would cause the `ActionType::Deployment` threshold to - /// be greater than any of the other action types. - pub fn set_action_threshold_unchecked( - &mut self, - action_type: ActionType, - threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - self.action_thresholds.set_threshold(action_type, threshold) - } - - /// Verifies if user can set action threshold. - pub fn can_set_threshold(&self, new_threshold: Weight) -> Result<(), SetThresholdFailure> { - let total_weight = self.associated_keys.total_keys_weight(); - if new_threshold > total_weight { - return Err(SetThresholdFailure::InsufficientTotalWeight); - } - Ok(()) - } - - /// Checks whether all authorization keys are associated with this addressable entity. - pub fn can_authorize(&self, authorization_keys: &BTreeSet) -> bool { - !authorization_keys.is_empty() - && authorization_keys - .iter() - .any(|e| self.associated_keys.contains_key(e)) - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to deploy threshold. - pub fn can_deploy_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().deployment() - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to key management threshold. - pub fn can_manage_keys_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().key_management() - } - - /// Checks whether the sum of the weights of all authorization keys is - /// greater or equal to upgrade management threshold. 
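A self-contained sketch of the saturating-sum rule behind `can_deploy_with` and `can_set_threshold`, using plain `u8` weights instead of the crate's `Weight` type:

    fn total_weight(weights: &[u8]) -> u8 {
        // Weights are summed with saturating arithmetic, so an overflowing total
        // caps at u8::MAX rather than wrapping.
        weights.iter().fold(0u8, |acc, w| acc.saturating_add(*w))
    }

    fn main() {
        let authorized = [1u8, 2]; // weights of the keys that signed
        let deployment_threshold = 3u8;

        // Deploying is allowed once the summed weight reaches the threshold.
        assert!(total_weight(&authorized) >= deployment_threshold);

        // A new threshold must not exceed the total weight of all associated keys.
        let all_keys = [1u8, 2, 4];
        let proposed_threshold = 8u8;
        assert!(proposed_threshold > total_weight(&all_keys)); // would be rejected
    }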
- pub fn can_upgrade_with(&self, authorization_keys: &BTreeSet) -> bool { - let total_weight = self - .associated_keys - .calculate_keys_weight(authorization_keys); - - total_weight >= *self.action_thresholds().upgrade_management() - } - - /// Adds new entry point - pub fn add_entry_point>(&mut self, entry_point: EntryPoint) { - self.entry_points.add_entry_point(entry_point); - } - - /// Addr for accessing wasm bytes - pub fn byte_code_addr(&self) -> ByteCodeAddr { - self.byte_code_hash.value() - } - - /// Returns immutable reference to methods - pub fn entry_points(&self) -> &EntryPoints { - &self.entry_points - } - - /// Returns a reference to the message topics - pub fn message_topics(&self) -> &MessageTopics { - &self.message_topics - } - - /// Adds a new message topic to the entity - pub fn add_message_topic( - &mut self, - topic_name: &str, - topic_name_hash: TopicNameHash, - ) -> Result<(), MessageTopicError> { - self.message_topics.add_topic(topic_name, topic_name_hash) - } - - /// Takes `named_keys` - pub fn take_named_keys(self) -> NamedKeys { - self.named_keys - } - - /// Returns a reference to `named_keys` - pub fn named_keys(&self) -> &NamedKeys { - &self.named_keys - } - - /// Appends `keys` to `named_keys` - pub fn named_keys_append(&mut self, keys: NamedKeys) { - self.named_keys.append(keys); - } - - /// Removes given named key. - pub fn remove_named_key(&mut self, key: &str) -> Option { - self.named_keys.remove(key) - } - - /// Set protocol_version. - pub fn set_protocol_version(&mut self, protocol_version: ProtocolVersion) { - self.protocol_version = protocol_version; - } - - /// Determines if `AddressableEntity` is compatible with a given `ProtocolVersion`. - pub fn is_compatible_protocol_version(&self, protocol_version: ProtocolVersion) -> bool { - self.protocol_version.value().major == protocol_version.value().major - } - - /// Extracts the access rights from the named keys of the addressable entity. - pub fn extract_access_rights(&self, entity_hash: AddressableEntityHash) -> ContextAccessRights { - let urefs_iter = self - .named_keys - .keys() - .filter_map(|key| key.as_uref().copied()) - .chain(iter::once(self.main_purse)); - ContextAccessRights::new(entity_hash, urefs_iter) - } - - /// Update the byte code hash for a given Entity associated with an Account. 
- pub fn update_session_entity( - self, - byte_code_hash: ByteCodeHash, - entry_points: EntryPoints, - ) -> Self { - Self { - package_hash: self.package_hash, - byte_code_hash, - named_keys: self.named_keys, - entry_points, - protocol_version: self.protocol_version, - main_purse: self.main_purse, - associated_keys: self.associated_keys, - action_thresholds: self.action_thresholds, - message_topics: self.message_topics, - } - } -} - -impl ToBytes for AddressableEntity { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.package_hash().write_bytes(&mut result)?; - self.byte_code_hash().write_bytes(&mut result)?; - self.named_keys().write_bytes(&mut result)?; - self.entry_points().write_bytes(&mut result)?; - self.protocol_version().write_bytes(&mut result)?; - self.main_purse().write_bytes(&mut result)?; - self.associated_keys().write_bytes(&mut result)?; - self.action_thresholds().write_bytes(&mut result)?; - self.message_topics().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - ToBytes::serialized_length(&self.entry_points) - + ToBytes::serialized_length(&self.package_hash) - + ToBytes::serialized_length(&self.byte_code_hash) - + ToBytes::serialized_length(&self.protocol_version) - + ToBytes::serialized_length(&self.named_keys) - + ToBytes::serialized_length(&self.main_purse) - + ToBytes::serialized_length(&self.associated_keys) - + ToBytes::serialized_length(&self.action_thresholds) - + ToBytes::serialized_length(&self.message_topics) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.package_hash().write_bytes(writer)?; - self.byte_code_hash().write_bytes(writer)?; - self.named_keys().write_bytes(writer)?; - self.entry_points().write_bytes(writer)?; - self.protocol_version().write_bytes(writer)?; - self.main_purse().write_bytes(writer)?; - self.associated_keys().write_bytes(writer)?; - self.action_thresholds().write_bytes(writer)?; - self.message_topics().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for AddressableEntity { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (package_hash, bytes) = PackageHash::from_bytes(bytes)?; - let (contract_wasm_hash, bytes) = ByteCodeHash::from_bytes(bytes)?; - let (named_keys, bytes) = NamedKeys::from_bytes(bytes)?; - let (entry_points, bytes) = EntryPoints::from_bytes(bytes)?; - let (protocol_version, bytes) = ProtocolVersion::from_bytes(bytes)?; - let (main_purse, bytes) = URef::from_bytes(bytes)?; - let (associated_keys, bytes) = AssociatedKeys::from_bytes(bytes)?; - let (action_thresholds, bytes) = ActionThresholds::from_bytes(bytes)?; - let (message_topics, bytes) = MessageTopics::from_bytes(bytes)?; - Ok(( - AddressableEntity { - package_hash, - byte_code_hash: contract_wasm_hash, - named_keys, - entry_points, - protocol_version, - main_purse, - associated_keys, - action_thresholds, - message_topics, - }, - bytes, - )) - } -} - -impl Default for AddressableEntity { - fn default() -> Self { - AddressableEntity { - named_keys: NamedKeys::new(), - entry_points: EntryPoints::new_with_default_entry_point(), - byte_code_hash: [0; KEY_HASH_LENGTH].into(), - package_hash: [0; KEY_HASH_LENGTH].into(), - protocol_version: ProtocolVersion::V1_0_0, - main_purse: URef::default(), - action_thresholds: ActionThresholds::default(), - associated_keys: AssociatedKeys::default(), - message_topics: MessageTopics::default(), - } - } -} - -impl From for AddressableEntity { - fn 
from(value: Contract) -> Self { - AddressableEntity::new( - PackageHash::new(value.contract_package_hash().value()), - ByteCodeHash::new(value.contract_wasm_hash().value()), - value.named_keys().clone(), - value.entry_points().clone(), - value.protocol_version(), - URef::default(), - AssociatedKeys::default(), - ActionThresholds::default(), - MessageTopics::default(), - ) - } -} - -impl From for AddressableEntity { - fn from(value: Account) -> Self { - AddressableEntity::new( - PackageHash::default(), - ByteCodeHash::new([0u8; 32]), - value.named_keys().clone(), - EntryPoints::new(), - ProtocolVersion::default(), - value.main_purse(), - value.associated_keys().clone().into(), - value.action_thresholds().clone().into(), - MessageTopics::default(), - ) - } -} - -/// Context of method execution -/// -/// Most significant bit represents version i.e. -/// - 0b0 -> 0.x/1.x (session & contracts) -/// - 0b1 -> 2.x and later (introduced installer, utility entry points) -#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, FromPrimitive)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum EntryPointType { - /// Runs as session code (caller) - /// Deprecated, retained to allow read back of legacy stored session. - Session = 0b00000000, - /// Runs within called entity's context (called) - AddressableEntity = 0b00000001, - /// This entry point is intended to extract a subset of bytecode. - /// Runs within called entity's context (called) - Factory = 0b10000000, -} - -impl EntryPointType { - /// Checks if entry point type is introduced before 2.0. - /// - /// This method checks if there is a bit pattern for entry point types introduced in 2.0. - /// - /// If this bit is missing, that means given entry point type was defined in pre-2.0 world. - pub fn is_legacy_pattern(&self) -> bool { - (*self as u8) & 0b10000000 == 0 - } - - /// Get the bit pattern. - pub fn bits(self) -> u8 { - self as u8 - } - - /// Returns true if entry point type is invalid for the context. - pub fn is_invalid_context(&self) -> bool { - match self { - EntryPointType::Session => true, - EntryPointType::AddressableEntity | EntryPointType::Factory => false, - } - } -} - -impl ToBytes for EntryPointType { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.bits().to_bytes() - } - - fn serialized_length(&self) -> usize { - 1 - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.bits()); - Ok(()) - } -} - -impl FromBytes for EntryPointType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, bytes) = u8::from_bytes(bytes)?; - let entry_point_type = - EntryPointType::from_u8(value).ok_or(bytesrepr::Error::Formatting)?; - Ok((entry_point_type, bytes)) - } -} - -/// Default name for an entry point. -pub const DEFAULT_ENTRY_POINT_NAME: &str = "call"; - -/// Name for an installer entry point. -pub const INSTALL_ENTRY_POINT_NAME: &str = "install"; - -/// Name for an upgrade entry point. -pub const UPGRADE_ENTRY_POINT_NAME: &str = "upgrade"; - -/// Collection of entry point parameters. -pub type Parameters = Vec; - -/// Type signature of a method. Order of arguments matter since can be -/// referenced by index as well as name. 
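A self-contained sketch of the top-bit test performed by `EntryPointType::is_legacy_pattern` on the discriminants above:

    const V2_BIT: u8 = 0b1000_0000;

    fn is_legacy(bits: u8) -> bool {
        // Variants introduced in 2.x set the most significant bit.
        bits & V2_BIT == 0
    }

    fn main() {
        assert!(is_legacy(0b0000_0000));  // Session
        assert!(is_legacy(0b0000_0001));  // AddressableEntity
        assert!(!is_legacy(0b1000_0000)); // Factory
    }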
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct EntryPoint { - name: String, - args: Parameters, - ret: CLType, - access: EntryPointAccess, - entry_point_type: EntryPointType, -} - -impl From for (String, Parameters, CLType, EntryPointAccess, EntryPointType) { - fn from(entry_point: EntryPoint) -> Self { - ( - entry_point.name, - entry_point.args, - entry_point.ret, - entry_point.access, - entry_point.entry_point_type, - ) - } -} - -impl EntryPoint { - /// `EntryPoint` constructor. - pub fn new>( - name: T, - args: Parameters, - ret: CLType, - access: EntryPointAccess, - entry_point_type: EntryPointType, - ) -> Self { - EntryPoint { - name: name.into(), - args, - ret, - access, - entry_point_type, - } - } - - /// Create a default [`EntryPoint`] with specified name. - pub fn default_with_name>(name: T) -> Self { - EntryPoint { - name: name.into(), - ..Default::default() - } - } - - /// Get name. - pub fn name(&self) -> &str { - &self.name - } - - /// Get access enum. - pub fn access(&self) -> &EntryPointAccess { - &self.access - } - - /// Get the arguments for this method. - pub fn args(&self) -> &[Parameter] { - self.args.as_slice() - } - - /// Get the return type. - pub fn ret(&self) -> &CLType { - &self.ret - } - - /// Obtains entry point - pub fn entry_point_type(&self) -> EntryPointType { - self.entry_point_type - } -} - -impl Default for EntryPoint { - /// constructor for a public session `EntryPoint` that takes no args and returns `Unit` - fn default() -> Self { - EntryPoint { - name: DEFAULT_ENTRY_POINT_NAME.to_string(), - args: Vec::new(), - ret: CLType::Unit, - access: EntryPointAccess::Public, - entry_point_type: EntryPointType::Session, - } - } -} - -impl ToBytes for EntryPoint { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.name.serialized_length() - + self.args.serialized_length() - + self.ret.serialized_length() - + self.access.serialized_length() - + self.entry_point_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.name.write_bytes(writer)?; - self.args.write_bytes(writer)?; - self.ret.append_bytes(writer)?; - self.access.write_bytes(writer)?; - self.entry_point_type.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for EntryPoint { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, bytes) = String::from_bytes(bytes)?; - let (args, bytes) = Vec::::from_bytes(bytes)?; - let (ret, bytes) = CLType::from_bytes(bytes)?; - let (access, bytes) = EntryPointAccess::from_bytes(bytes)?; - let (entry_point_type, bytes) = EntryPointType::from_bytes(bytes)?; - - Ok(( - EntryPoint { - name, - args, - ret, - access, - entry_point_type, - }, - bytes, - )) - } -} - -/// Enum describing the possible access control options for a contract entry -/// point (method). -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum EntryPointAccess { - /// Anyone can call this method (no access controls). - Public, - /// Only users from the listed groups may call this method. Note: if the - /// list is empty then this method is not callable from outside the - /// contract. 
- Groups(Vec), - /// Can't be accessed directly but are kept in the derived wasm bytes. - Template, -} - -const ENTRYPOINTACCESS_PUBLIC_TAG: u8 = 1; -const ENTRYPOINTACCESS_GROUPS_TAG: u8 = 2; -const ENTRYPOINTACCESS_ABSTRACT_TAG: u8 = 3; - -impl EntryPointAccess { - /// Constructor for access granted to only listed groups. - pub fn groups(labels: &[&str]) -> Self { - let list: Vec = labels - .iter() - .map(|s| Group::new(String::from(*s))) - .collect(); - EntryPointAccess::Groups(list) - } -} - -impl ToBytes for EntryPointAccess { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - - match self { - EntryPointAccess::Public => { - result.push(ENTRYPOINTACCESS_PUBLIC_TAG); - } - EntryPointAccess::Groups(groups) => { - result.push(ENTRYPOINTACCESS_GROUPS_TAG); - result.append(&mut groups.to_bytes()?); - } - EntryPointAccess::Template => { - result.push(ENTRYPOINTACCESS_ABSTRACT_TAG); - } - } - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - EntryPointAccess::Public => 1, - EntryPointAccess::Groups(groups) => 1 + groups.serialized_length(), - EntryPointAccess::Template => 1, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - EntryPointAccess::Public => { - writer.push(ENTRYPOINTACCESS_PUBLIC_TAG); - } - EntryPointAccess::Groups(groups) => { - writer.push(ENTRYPOINTACCESS_GROUPS_TAG); - groups.write_bytes(writer)?; - } - EntryPointAccess::Template => { - writer.push(ENTRYPOINTACCESS_ABSTRACT_TAG); - } - } - Ok(()) - } -} - -impl FromBytes for EntryPointAccess { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, bytes) = u8::from_bytes(bytes)?; - - match tag { - ENTRYPOINTACCESS_PUBLIC_TAG => Ok((EntryPointAccess::Public, bytes)), - ENTRYPOINTACCESS_GROUPS_TAG => { - let (groups, bytes) = Vec::::from_bytes(bytes)?; - let result = EntryPointAccess::Groups(groups); - Ok((result, bytes)) - } - ENTRYPOINTACCESS_ABSTRACT_TAG => Ok((EntryPointAccess::Template, bytes)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -/// Parameter to a method -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Parameter { - name: String, - cl_type: CLType, -} - -impl Parameter { - /// `Parameter` constructor. - pub fn new>(name: T, cl_type: CLType) -> Self { - Parameter { - name: name.into(), - cl_type, - } - } - - /// Get the type of this argument. - pub fn cl_type(&self) -> &CLType { - &self.cl_type - } - - /// Get a reference to the parameter's name. 
- pub fn name(&self) -> &str { - &self.name - } -} - -impl From for (String, CLType) { - fn from(parameter: Parameter) -> Self { - (parameter.name, parameter.cl_type) - } -} - -impl ToBytes for Parameter { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = ToBytes::to_bytes(&self.name)?; - self.cl_type.append_bytes(&mut result)?; - - Ok(result) - } - - fn serialized_length(&self) -> usize { - ToBytes::serialized_length(&self.name) + self.cl_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.name.write_bytes(writer)?; - self.cl_type.append_bytes(writer) - } -} - -impl FromBytes for Parameter { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, bytes) = String::from_bytes(bytes)?; - let (cl_type, bytes) = CLType::from_bytes(bytes)?; - - Ok((Parameter { name, cl_type }, bytes)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{AccessRights, URef, UREF_ADDR_LENGTH}; - - #[test] - fn entity_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let entity_hash = HashAddr::try_from(&bytes[..]).expect("should create contract hash"); - let entity_hash = AddressableEntityHash::new(entity_hash); - assert_eq!(&bytes, &entity_hash.as_bytes()); - } - - #[test] - fn entity_hash_from_str() { - let entity_hash = AddressableEntityHash([3; 32]); - let encoded = entity_hash.to_formatted_string(); - let decoded = AddressableEntityHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(entity_hash, decoded); - - let invalid_prefix = - "addressable-entity--0000000000000000000000000000000000000000000000000000000000000000"; - assert!(AddressableEntityHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "addressable-entity-00000000000000000000000000000000000000000000000000000000000000"; - assert!(AddressableEntityHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "addressable-entity-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(AddressableEntityHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "addressable-entity-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(AddressableEntityHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn entity_hash_serde_roundtrip() { - let entity_hash = AddressableEntityHash([255; 32]); - let serialized = bincode::serialize(&entity_hash).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(entity_hash, deserialized) - } - - #[test] - fn entity_hash_json_roundtrip() { - let entity_hash = AddressableEntityHash([255; 32]); - let json_string = serde_json::to_string_pretty(&entity_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(entity_hash, decoded) - } - - #[test] - fn should_extract_access_rights() { - const MAIN_PURSE: URef = URef::new([2; 32], AccessRights::READ_ADD_WRITE); - - let entity_hash = AddressableEntityHash([255; 32]); - let uref = URef::new([84; UREF_ADDR_LENGTH], AccessRights::READ_ADD); - let uref_r = URef::new([42; UREF_ADDR_LENGTH], AccessRights::READ); - let uref_a = URef::new([42; UREF_ADDR_LENGTH], AccessRights::ADD); - let uref_w = URef::new([42; UREF_ADDR_LENGTH], AccessRights::WRITE); - let mut named_keys = NamedKeys::new(); - named_keys.insert("a".to_string(), Key::URef(uref_r)); - named_keys.insert("b".to_string(), Key::URef(uref_a)); - named_keys.insert("c".to_string(), Key::URef(uref_w)); - 
named_keys.insert("d".to_string(), Key::URef(uref)); - let associated_keys = AssociatedKeys::new(AccountHash::new([254; 32]), Weight::new(1)); - let contract = AddressableEntity::new( - PackageHash::new([254; 32]), - ByteCodeHash::new([253; 32]), - named_keys, - EntryPoints::new_with_default_entry_point(), - ProtocolVersion::V1_0_0, - MAIN_PURSE, - associated_keys, - ActionThresholds::new(Weight::new(1), Weight::new(1), Weight::new(1)) - .expect("should create thresholds"), - MessageTopics::default(), - ); - let access_rights = contract.extract_access_rights(entity_hash); - let expected_uref = URef::new([42; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - assert!( - access_rights.has_access_rights_to_uref(&uref), - "urefs in named keys should be included in access rights" - ); - assert!( - access_rights.has_access_rights_to_uref(&expected_uref), - "multiple access right bits to the same uref should coalesce" - ); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_contract(contract in gens::addressable_entity_arb()) { - bytesrepr::test_serialization_roundtrip(&contract); - } - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity/action_thresholds.rs b/casper_types_ver_2_0/src/addressable_entity/action_thresholds.rs deleted file mode 100644 index 4d6d58b9..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/action_thresholds.rs +++ /dev/null @@ -1,212 +0,0 @@ -//! This module contains types and functions for managing action thresholds. - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::ActionThresholds as AccountActionThresholds, - addressable_entity::{ActionType, SetThresholdFailure, Weight, WEIGHT_SERIALIZED_LENGTH}, - bytesrepr::{self, Error, FromBytes, ToBytes}, -}; - -/// Thresholds that have to be met when executing an action of a certain type. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "EntityActionThresholds"))] -pub struct ActionThresholds { - /// Threshold for deploy execution. - pub deployment: Weight, - /// Threshold for upgrading contracts. - pub upgrade_management: Weight, - /// Threshold for managing action threshold. - pub key_management: Weight, -} - -impl ActionThresholds { - /// Creates new ActionThresholds object with provided weights - /// - /// Requires deployment threshold to be lower than or equal to - /// key management threshold. - pub fn new( - deployment: Weight, - upgrade_management: Weight, - key_management: Weight, - ) -> Result { - if deployment > key_management { - return Err(SetThresholdFailure::DeploymentThreshold); - } - Ok(ActionThresholds { - deployment, - upgrade_management, - key_management, - }) - } - /// Sets new threshold for [ActionType::Deployment]. - /// Should return an error if setting new threshold for `action_type` breaks - /// one of the invariants. Currently, invariant is that - /// `ActionType::Deployment` threshold shouldn't be higher than any - /// other, which should be checked both when increasing `Deployment` - /// threshold and decreasing the other. 
- pub fn set_deployment_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if new_threshold > self.key_management { - Err(SetThresholdFailure::DeploymentThreshold) - } else { - self.deployment = new_threshold; - Ok(()) - } - } - - /// Sets new threshold for [ActionType::KeyManagement]. - pub fn set_key_management_threshold( - &mut self, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - if self.deployment > new_threshold { - Err(SetThresholdFailure::KeyManagementThreshold) - } else { - self.key_management = new_threshold; - Ok(()) - } - } - - /// Sets new threshold for [ActionType::UpgradeManagement]. - pub fn set_upgrade_management_threshold( - &mut self, - upgrade_management: Weight, - ) -> Result<(), SetThresholdFailure> { - self.upgrade_management = upgrade_management; - Ok(()) - } - - /// Returns the deployment action threshold. - pub fn deployment(&self) -> &Weight { - &self.deployment - } - - /// Returns key management action threshold. - pub fn key_management(&self) -> &Weight { - &self.key_management - } - - /// Returns the upgrade management action threshold. - pub fn upgrade_management(&self) -> &Weight { - &self.upgrade_management - } - - /// Unified function that takes an action type, and changes appropriate - /// threshold defined by the [ActionType] variants. - pub fn set_threshold( - &mut self, - action_type: ActionType, - new_threshold: Weight, - ) -> Result<(), SetThresholdFailure> { - match action_type { - ActionType::Deployment => self.set_deployment_threshold(new_threshold), - ActionType::KeyManagement => self.set_key_management_threshold(new_threshold), - ActionType::UpgradeManagement => self.set_upgrade_management_threshold(new_threshold), - } - } -} - -impl Default for ActionThresholds { - fn default() -> Self { - ActionThresholds { - deployment: Weight::new(1), - upgrade_management: Weight::new(1), - key_management: Weight::new(1), - } - } -} - -impl From for ActionThresholds { - fn from(value: AccountActionThresholds) -> Self { - Self { - deployment: Weight::new(value.deployment.value()), - key_management: Weight::new(value.key_management.value()), - upgrade_management: Weight::new(1), - } - } -} - -impl ToBytes for ActionThresholds { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - result.append(&mut self.deployment.to_bytes()?); - result.append(&mut self.upgrade_management.to_bytes()?); - result.append(&mut self.key_management.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - 3 * WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deployment().write_bytes(writer)?; - self.upgrade_management().write_bytes(writer)?; - self.key_management().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ActionThresholds { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (deployment, rem) = Weight::from_bytes(bytes)?; - let (upgrade_management, rem) = Weight::from_bytes(rem)?; - let (key_management, rem) = Weight::from_bytes(rem)?; - let ret = ActionThresholds { - deployment, - upgrade_management, - key_management, - }; - Ok((ret, rem)) - } -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use super::ActionThresholds; - - pub fn action_thresholds_arb() -> impl Strategy { - Just(Default::default()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn 
should_create_new_action_thresholds() { - let action_thresholds = - ActionThresholds::new(Weight::new(1), Weight::new(1), Weight::new(42)).unwrap(); - assert_eq!(*action_thresholds.deployment(), Weight::new(1)); - assert_eq!(*action_thresholds.upgrade_management(), Weight::new(1)); - assert_eq!(*action_thresholds.key_management(), Weight::new(42)); - } - - #[test] - fn should_not_create_action_thresholds_with_invalid_deployment_threshold() { - // deployment cant be greater than key management - assert!(ActionThresholds::new(Weight::new(5), Weight::new(1), Weight::new(1)).is_err()); - } - - #[test] - fn serialization_roundtrip() { - let action_thresholds = - ActionThresholds::new(Weight::new(1), Weight::new(1), Weight::new(42)).unwrap(); - bytesrepr::test_serialization_roundtrip(&action_thresholds); - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity/action_type.rs b/casper_types_ver_2_0/src/addressable_entity/action_type.rs deleted file mode 100644 index 2a627309..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/action_type.rs +++ /dev/null @@ -1,38 +0,0 @@ -use core::convert::TryFrom; - -use super::TryFromIntError; - -/// The various types of action which can be performed in the context of a given account. -#[repr(u32)] -pub enum ActionType { - /// Represents performing a deploy. - Deployment = 0, - /// Represents changing the associated keys (i.e. map of [`AccountHash`](super::AccountHash)s - /// to [`Weight`](super::Weight)s) or action thresholds (i.e. the total - /// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to - /// perform various actions). - KeyManagement = 1, - /// Represents changing the associated keys (i.e. map of [`AccountHash`](super::AccountHash)s - /// to [`Weight`](super::Weight)s) or action thresholds (i.e. the total - /// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to - /// upgrade the addressable entity. - UpgradeManagement = 2, -} - -// This conversion is not intended to be used by third party crates. -#[doc(hidden)] -impl TryFrom for ActionType { - type Error = TryFromIntError; - - fn try_from(value: u32) -> Result { - // This doesn't use `num_derive` traits such as FromPrimitive and ToPrimitive - // that helps to automatically create `from_u32` and `to_u32`. This approach - // gives better control over generated code. - match value { - d if d == ActionType::Deployment as u32 => Ok(ActionType::Deployment), - d if d == ActionType::KeyManagement as u32 => Ok(ActionType::KeyManagement), - d if d == ActionType::UpgradeManagement as u32 => Ok(ActionType::UpgradeManagement), - _ => Err(TryFromIntError(())), - } - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity/associated_keys.rs b/casper_types_ver_2_0/src/addressable_entity/associated_keys.rs deleted file mode 100644 index 9f8ae2ac..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/associated_keys.rs +++ /dev/null @@ -1,386 +0,0 @@ -//! This module contains types and functions for working with keys associated with an account. 
- -use alloc::{ - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - vec::Vec, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -use crate::{ - account::{AccountHash, AssociatedKeys as AccountAssociatedKeys}, - addressable_entity::{AddKeyFailure, RemoveKeyFailure, UpdateKeyFailure, Weight}, - bytesrepr::{self, FromBytes, ToBytes}, -}; - -/// A collection of weighted public keys (represented as account hashes) associated with an account. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "EntityAssociatedKeys"))] -#[serde(deny_unknown_fields)] -#[rustfmt::skip] -pub struct AssociatedKeys( - #[serde(with = "BTreeMapToArray::")] - BTreeMap, -); - -impl AssociatedKeys { - /// Constructs a new AssociatedKeys. - pub fn new(key: AccountHash, weight: Weight) -> AssociatedKeys { - let mut bt: BTreeMap = BTreeMap::new(); - bt.insert(key, weight); - AssociatedKeys(bt) - } - - /// Adds a new AssociatedKey to the set. - /// - /// Returns true if added successfully, false otherwise. - pub fn add_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), AddKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(entry) => { - entry.insert(weight); - } - Entry::Occupied(_) => return Err(AddKeyFailure::DuplicateKey), - } - Ok(()) - } - - /// Removes key from the associated keys set. - /// Returns true if value was found in the set prior to the removal, false - /// otherwise. - pub fn remove_key(&mut self, key: &AccountHash) -> Result<(), RemoveKeyFailure> { - self.0 - .remove(key) - .map(|_| ()) - .ok_or(RemoveKeyFailure::MissingKey) - } - - /// Adds new AssociatedKey to the set. - /// Returns true if added successfully, false otherwise. - pub fn update_key(&mut self, key: AccountHash, weight: Weight) -> Result<(), UpdateKeyFailure> { - match self.0.entry(key) { - Entry::Vacant(_) => { - return Err(UpdateKeyFailure::MissingKey); - } - Entry::Occupied(mut entry) => { - *entry.get_mut() = weight; - } - } - Ok(()) - } - - /// Returns the weight of an account hash. - pub fn get(&self, key: &AccountHash) -> Option<&Weight> { - self.0.get(key) - } - - /// Returns `true` if a given key exists. - pub fn contains_key(&self, key: &AccountHash) -> bool { - self.0.contains_key(key) - } - - /// Returns an iterator over the account hash and the weights. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns the count of the associated keys. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if the associated keys are empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Helper method that calculates weight for keys that comes from any - /// source. - /// - /// This method is not concerned about uniqueness of the passed iterable. - /// Uniqueness is determined based on the input collection properties, - /// which is either BTreeSet (in [`AssociatedKeys::calculate_keys_weight`]) - /// or BTreeMap (in [`AssociatedKeys::total_keys_weight`]). 
- fn calculate_any_keys_weight<'a>(&self, keys: impl Iterator) -> Weight { - let total = keys - .filter_map(|key| self.0.get(key)) - .fold(0u8, |acc, w| acc.saturating_add(w.value())); - - Weight::new(total) - } - - /// Calculates total weight of authorization keys provided by an argument - pub fn calculate_keys_weight(&self, authorization_keys: &BTreeSet) -> Weight { - self.calculate_any_keys_weight(authorization_keys.iter()) - } - - /// Calculates total weight of all authorization keys - pub fn total_keys_weight(&self) -> Weight { - self.calculate_any_keys_weight(self.0.keys()) - } - - /// Calculates total weight of all authorization keys excluding a given key - pub fn total_keys_weight_excluding(&self, account_hash: AccountHash) -> Weight { - self.calculate_any_keys_weight(self.0.keys().filter(|&&element| element != account_hash)) - } -} - -impl From> for AssociatedKeys { - fn from(associated_keys: BTreeMap) -> Self { - Self(associated_keys) - } -} - -impl ToBytes for AssociatedKeys { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for AssociatedKeys { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (associated_keys, rem) = FromBytes::from_bytes(bytes)?; - Ok((AssociatedKeys(associated_keys), rem)) - } -} - -impl From for AssociatedKeys { - fn from(value: AccountAssociatedKeys) -> Self { - let mut associated_keys = AssociatedKeys::default(); - for (account_hash, weight) in value.iter() { - associated_keys - .0 - .insert(*account_hash, Weight::new(weight.value())); - } - associated_keys - } -} - -struct Labels; - -impl KeyValueLabels for Labels { - const KEY: &'static str = "account_hash"; - const VALUE: &'static str = "weight"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for Labels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("AssociatedKey"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some("A weighted public key."); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = - Some("The account hash of the public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = - Some("The weight assigned to the public key."); -} - -#[doc(hidden)] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::*; - - use crate::gens::{account_hash_arb, weight_arb}; - - use super::AssociatedKeys; - - pub fn associated_keys_arb() -> impl Strategy { - proptest::collection::btree_map(account_hash_arb(), weight_arb(), 10).prop_map(|keys| { - let mut associated_keys = AssociatedKeys::default(); - keys.into_iter().for_each(|(k, v)| { - associated_keys.add_key(k, v).unwrap(); - }); - associated_keys - }) - } -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeSet, iter::FromIterator}; - - use crate::{ - account::{AccountHash, ACCOUNT_HASH_LENGTH}, - addressable_entity::{AddKeyFailure, Weight}, - bytesrepr, - }; - - use super::*; - - #[test] - fn associated_keys_add() { - let mut keys = - AssociatedKeys::new(AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]), Weight::new(1)); - let new_pk = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let new_pk_weight = Weight::new(2); - assert!(keys.add_key(new_pk, new_pk_weight).is_ok()); - assert_eq!(keys.get(&new_pk), Some(&new_pk_weight)) - } - - #[test] - fn associated_keys_add_duplicate() { - let 
pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert_eq!( - keys.add_key(pk, Weight::new(10)), - Err(AddKeyFailure::DuplicateKey) - ); - assert_eq!(keys.get(&pk), Some(&weight)); - } - - #[test] - fn associated_keys_remove() { - let pk = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk, weight); - assert!(keys.remove_key(&pk).is_ok()); - assert!(keys - .remove_key(&AccountHash::new([1u8; ACCOUNT_HASH_LENGTH])) - .is_err()); - } - - #[test] - fn associated_keys_update() { - let pk1 = AccountHash::new([0u8; ACCOUNT_HASH_LENGTH]); - let pk2 = AccountHash::new([1u8; ACCOUNT_HASH_LENGTH]); - let weight = Weight::new(1); - let mut keys = AssociatedKeys::new(pk1, weight); - assert!(matches!( - keys.update_key(pk2, Weight::new(2)) - .expect_err("should get error"), - UpdateKeyFailure::MissingKey - )); - keys.add_key(pk2, Weight::new(1)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(1))); - keys.update_key(pk2, Weight::new(2)).unwrap(); - assert_eq!(keys.get(&pk2), Some(&Weight::new(2))); - } - - #[test] - fn associated_keys_calculate_keys_once() { - let key_1 = AccountHash::new([0; 32]); - let key_2 = AccountHash::new([1; 32]); - let key_3 = AccountHash::new([2; 32]); - let mut keys = AssociatedKeys::default(); - - keys.add_key(key_2, Weight::new(2)) - .expect("should add key_1"); - keys.add_key(key_1, Weight::new(1)) - .expect("should add key_1"); - keys.add_key(key_3, Weight::new(3)) - .expect("should add key_1"); - - assert_eq!( - keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - key_1, key_2, key_3, key_1, key_2, key_3, - ])), - Weight::new(1 + 2 + 3) - ); - } - - #[test] - fn associated_keys_total_weight() { - let associated_keys = { - let mut res = AssociatedKeys::new(AccountHash::new([1u8; 32]), Weight::new(1)); - res.add_key(AccountHash::new([2u8; 32]), Weight::new(11)) - .expect("should add key 1"); - res.add_key(AccountHash::new([3u8; 32]), Weight::new(12)) - .expect("should add key 2"); - res.add_key(AccountHash::new([4u8; 32]), Weight::new(13)) - .expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight(), - Weight::new(1 + 11 + 12 + 13) - ); - } - - #[test] - fn associated_keys_total_weight_excluding() { - let identity_key = AccountHash::new([1u8; 32]); - let identity_key_weight = Weight::new(1); - - let key_1 = AccountHash::new([2u8; 32]); - let key_1_weight = Weight::new(11); - - let key_2 = AccountHash::new([3u8; 32]); - let key_2_weight = Weight::new(12); - - let key_3 = AccountHash::new([4u8; 32]); - let key_3_weight = Weight::new(13); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - res.add_key(key_1, key_1_weight).expect("should add key 1"); - res.add_key(key_2, key_2_weight).expect("should add key 2"); - res.add_key(key_3, key_3_weight).expect("should add key 3"); - res - }; - assert_eq!( - associated_keys.total_keys_weight_excluding(key_2), - Weight::new(identity_key_weight.value() + key_1_weight.value() + key_3_weight.value()) - ); - } - - #[test] - fn overflowing_keys_weight() { - let identity_key = AccountHash::new([1u8; 32]); - let key_1 = AccountHash::new([2u8; 32]); - let key_2 = AccountHash::new([3u8; 32]); - let key_3 = AccountHash::new([4u8; 32]); - - let identity_key_weight = Weight::new(250); - let weight_1 = Weight::new(1); - let weight_2 = Weight::new(2); - let weight_3 = Weight::new(3); - - let 
saturated_weight = Weight::new(u8::max_value()); - - let associated_keys = { - let mut res = AssociatedKeys::new(identity_key, identity_key_weight); - - res.add_key(key_1, weight_1).expect("should add key 1"); - res.add_key(key_2, weight_2).expect("should add key 2"); - res.add_key(key_3, weight_3).expect("should add key 3"); - res - }; - - assert_eq!( - associated_keys.calculate_keys_weight(&BTreeSet::from_iter(vec![ - identity_key, // 250 - key_1, // 251 - key_2, // 253 - key_3, // 256 - error - ])), - saturated_weight, - ); - } - - #[test] - fn serialization_roundtrip() { - let mut keys = AssociatedKeys::default(); - keys.add_key(AccountHash::new([1; 32]), Weight::new(1)) - .unwrap(); - keys.add_key(AccountHash::new([2; 32]), Weight::new(2)) - .unwrap(); - keys.add_key(AccountHash::new([3; 32]), Weight::new(3)) - .unwrap(); - bytesrepr::test_serialization_roundtrip(&keys); - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity/error.rs b/casper_types_ver_2_0/src/addressable_entity/error.rs deleted file mode 100644 index f4a75866..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/error.rs +++ /dev/null @@ -1,112 +0,0 @@ -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Display, Formatter}, -}; - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -#[derive(Debug, Eq, PartialEq)] -pub struct TryFromIntError(pub ()); - -/// Error returned when decoding an `AccountHash` from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromAccountHashStrError { - /// The prefix is invalid. - InvalidPrefix, - /// The hash is not valid hex. - Hex(base16::DecodeError), - /// The hash is the wrong length. - Hash(TryFromSliceError), -} - -impl From for FromAccountHashStrError { - fn from(error: base16::DecodeError) -> Self { - FromAccountHashStrError::Hex(error) - } -} - -impl From for FromAccountHashStrError { - fn from(error: TryFromSliceError) -> Self { - FromAccountHashStrError::Hash(error) - } -} - -impl Display for FromAccountHashStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromAccountHashStrError::InvalidPrefix => write!(f, "prefix is not 'account-hash-'"), - FromAccountHashStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromAccountHashStrError::Hash(error) => { - write!(f, "address portion is wrong length: {}", error) - } - } - } -} - -/// Errors that can occur while changing action thresholds (i.e. the total -/// [`Weight`](super::Weight)s of signing [`AccountHash`](super::AccountHash)s required to perform -/// various actions) on an account. -#[repr(i32)] -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -#[non_exhaustive] -pub enum SetThresholdFailure { - /// Setting the key-management threshold to a value lower than the deployment threshold is - /// disallowed. - KeyManagementThreshold = 1, - /// Setting the deployment threshold to a value greater than any other threshold is disallowed. - DeploymentThreshold = 2, - /// Caller doesn't have sufficient permissions to set new thresholds. - PermissionDeniedError = 3, - /// Setting a threshold to a value greater than the total weight of associated keys is - /// disallowed. - InsufficientTotalWeight = 4, -} - -// This conversion is not intended to be used by third party crates. 
-#[doc(hidden)] -impl TryFrom for SetThresholdFailure { - type Error = TryFromIntError; - - fn try_from(value: i32) -> Result { - match value { - d if d == SetThresholdFailure::KeyManagementThreshold as i32 => { - Ok(SetThresholdFailure::KeyManagementThreshold) - } - d if d == SetThresholdFailure::DeploymentThreshold as i32 => { - Ok(SetThresholdFailure::DeploymentThreshold) - } - d if d == SetThresholdFailure::PermissionDeniedError as i32 => { - Ok(SetThresholdFailure::PermissionDeniedError) - } - d if d == SetThresholdFailure::InsufficientTotalWeight as i32 => { - Ok(SetThresholdFailure::InsufficientTotalWeight) - } - _ => Err(TryFromIntError(())), - } - } -} - -impl Display for SetThresholdFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - SetThresholdFailure::KeyManagementThreshold => formatter - .write_str("New threshold should be greater than or equal to deployment threshold"), - SetThresholdFailure::DeploymentThreshold => formatter.write_str( - "New threshold should be lower than or equal to key management threshold", - ), - SetThresholdFailure::PermissionDeniedError => formatter - .write_str("Unable to set action threshold due to insufficient permissions"), - SetThresholdFailure::InsufficientTotalWeight => formatter.write_str( - "New threshold should be lower or equal than total weight of associated keys", - ), - } - } -} - -/// Associated error type of `TryFrom<&[u8]>` for [`AccountHash`](super::AccountHash). -#[derive(Debug)] -pub struct TryFromSliceForAccountHashError(()); diff --git a/casper_types_ver_2_0/src/addressable_entity/named_keys.rs b/casper_types_ver_2_0/src/addressable_entity/named_keys.rs deleted file mode 100644 index 37a0bcd0..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/named_keys.rs +++ /dev/null @@ -1,166 +0,0 @@ -use alloc::{collections::BTreeMap, string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -#[cfg(feature = "json-schema")] -use crate::execution::execution_result_v1::NamedKey; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, Key, -}; - -/// A collection of named keys. -#[derive(Clone, Eq, PartialEq, Default, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -#[rustfmt::skip] -pub struct NamedKeys( - #[serde(with = "BTreeMapToArray::")] - #[cfg_attr(feature = "json-schema", schemars(with = "Vec"))] - BTreeMap, -); - -impl NamedKeys { - /// Constructs a new, empty `NamedKeys`. - pub const fn new() -> Self { - NamedKeys(BTreeMap::new()) - } - - /// Consumes `self`, returning the wrapped map. - pub fn into_inner(self) -> BTreeMap { - self.0 - } - - /// Inserts a named key. - /// - /// If the map did not have this name present, `None` is returned. If the map did have this - /// name present, the `Key` is updated, and the old `Key` is returned. - pub fn insert(&mut self, name: String, key: Key) -> Option { - self.0.insert(name, key) - } - - /// Moves all elements from `other` into `self`. - pub fn append(&mut self, mut other: Self) { - self.0.append(&mut other.0) - } - - /// Removes a named `Key`, returning the `Key` if it existed in the collection. 
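
Editor's note: the `NamedKeys` type deleted just below is a thin newtype over `BTreeMap<String, Key>`. A minimal, hypothetical usage sketch follows; the `casper_types::addressable_entity` import path and the `Key::Hash` constructor are assumptions about the replacement `casper-types` crate, not part of this PR.

    use casper_types::{addressable_entity::NamedKeys, Key};

    fn main() {
        let mut named_keys = NamedKeys::new();
        // `insert` returns the previous `Key` stored under the name, if any.
        assert!(named_keys
            .insert("counter".to_string(), Key::Hash([1u8; 32]))
            .is_none());
        assert!(named_keys.contains("counter"));
        assert_eq!(named_keys.len(), 1);
        // `remove` hands back the stored `Key`.
        let removed = named_keys.remove("counter");
        assert_eq!(removed, Some(Key::Hash([1u8; 32])));
        assert!(named_keys.is_empty());
    }
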
- pub fn remove(&mut self, name: &str) -> Option { - self.0.remove(name) - } - - /// Returns a reference to the `Key` under the given `name` if any. - pub fn get(&self, name: &str) -> Option<&Key> { - self.0.get(name) - } - - /// Returns `true` if the named `Key` exists in the collection. - pub fn contains(&self, name: &str) -> bool { - self.0.contains_key(name) - } - - /// Returns an iterator over the names. - pub fn names(&self) -> impl Iterator { - self.0.keys() - } - - /// Returns an iterator over the `Key`s (i.e. the map's values). - pub fn keys(&self) -> impl Iterator { - self.0.values() - } - - /// Returns a mutable iterator over the `Key`s (i.e. the map's values). - pub fn keys_mut(&mut self) -> impl Iterator { - self.0.values_mut() - } - - /// Returns an iterator over the name-key pairs. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns the number of named `Key`s. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if there are no named `Key`s. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl From> for NamedKeys { - fn from(value: BTreeMap) -> Self { - NamedKeys(value) - } -} - -impl ToBytes for NamedKeys { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for NamedKeys { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (named_keys, remainder) = BTreeMap::::from_bytes(bytes)?; - Ok((NamedKeys(named_keys), remainder)) - } -} - -impl CLTyped for NamedKeys { - fn cl_type() -> CLType { - BTreeMap::::cl_type() - } -} - -struct Labels; - -impl KeyValueLabels for Labels { - const KEY: &'static str = "name"; - const VALUE: &'static str = "key"; -} - -#[cfg(test)] -mod tests { - use rand::Rng; - - use super::*; - use crate::testing::TestRng; - - /// `NamedKeys` was previously (pre node v2.0.0) just an alias for `BTreeMap`. - /// Check if we serialize as the old form, that can deserialize to the new. - #[test] - fn should_be_backwards_compatible() { - let rng = &mut TestRng::new(); - let mut named_keys = NamedKeys::new(); - assert!(named_keys.insert("a".to_string(), rng.gen()).is_none()); - assert!(named_keys.insert("bb".to_string(), rng.gen()).is_none()); - assert!(named_keys.insert("ccc".to_string(), rng.gen()).is_none()); - - let serialized_old = bincode::serialize(&named_keys.0).unwrap(); - let parsed_new = bincode::deserialize(&serialized_old).unwrap(); - assert_eq!(named_keys, parsed_new); - - let serialized_old = bytesrepr::serialize(&named_keys.0).unwrap(); - let parsed_new = bytesrepr::deserialize(serialized_old).unwrap(); - assert_eq!(named_keys, parsed_new); - } -} diff --git a/casper_types_ver_2_0/src/addressable_entity/weight.rs b/casper_types_ver_2_0/src/addressable_entity/weight.rs deleted file mode 100644 index ee2f0343..00000000 --- a/casper_types_ver_2_0/src/addressable_entity/weight.rs +++ /dev/null @@ -1,66 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// The number of bytes in a serialized [`Weight`]. 
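
Editor's note: the `Weight` type deleted below encodes as exactly one byte under `bytesrepr`. A small round-trip sketch, assuming the same type remains exported by the replacement `casper-types` crate (the `addressable_entity::Weight` path is an assumption):

    use casper_types::{
        addressable_entity::Weight,
        bytesrepr::{FromBytes, ToBytes},
    };

    fn main() {
        let weight = Weight::new(3);
        // A `Weight` is a one-byte newtype, so its bytesrepr form is a single byte.
        let bytes = weight.to_bytes().expect("should serialize");
        assert_eq!(bytes, vec![3u8]);
        assert_eq!(weight.serialized_length(), 1);

        let (parsed, remainder) = Weight::from_bytes(&bytes).expect("should deserialize");
        assert_eq!(parsed.value(), 3);
        assert!(remainder.is_empty());
    }
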
-pub const WEIGHT_SERIALIZED_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// The weight associated with public keys in an account's associated keys. -#[derive(PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr( - feature = "json-schema", - schemars(rename = "EntityAssociatedKeyWeight") -)] -pub struct Weight(u8); - -impl Weight { - /// Constructs a new `Weight`. - pub const fn new(weight: u8) -> Weight { - Weight(weight) - } - - /// Returns the value of `self` as a `u8`. - pub fn value(self) -> u8 { - self.0 - } -} - -impl ToBytes for Weight { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - WEIGHT_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.0); - Ok(()) - } -} - -impl FromBytes for Weight { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (byte, rem) = u8::from_bytes(bytes)?; - Ok((Weight::new(byte), rem)) - } -} - -impl CLTyped for Weight { - fn cl_type() -> CLType { - CLType::U8 - } -} diff --git a/casper_types_ver_2_0/src/api_error.rs b/casper_types_ver_2_0/src/api_error.rs deleted file mode 100644 index 2c1a1d59..00000000 --- a/casper_types_ver_2_0/src/api_error.rs +++ /dev/null @@ -1,949 +0,0 @@ -//! Contains [`ApiError`] and associated helper functions. - -use core::{ - convert::TryFrom, - fmt::{self, Debug, Formatter}, -}; - -use crate::{ - addressable_entity::{ - self, AddKeyFailure, MessageTopicError, RemoveKeyFailure, SetThresholdFailure, - TryFromIntError, TryFromSliceForAccountHashError, UpdateKeyFailure, - }, - bytesrepr, - system::{auction, handle_payment, mint}, - CLValueError, -}; - -/// All `Error` variants defined in this library other than `Error::User` will convert to a `u32` -/// value less than or equal to `RESERVED_ERROR_MAX`. -const RESERVED_ERROR_MAX: u32 = u16::MAX as u32; // 0..=65535 - -/// Handle Payment errors will have this value added to them when being converted to a `u32`. -const POS_ERROR_OFFSET: u32 = RESERVED_ERROR_MAX - u8::MAX as u32; // 65280..=65535 - -/// Mint errors will have this value added to them when being converted to a `u32`. -const MINT_ERROR_OFFSET: u32 = (POS_ERROR_OFFSET - 1) - u8::MAX as u32; // 65024..=65279 - -/// Contract header errors will have this value added to them when being converted to a `u32`. -const HEADER_ERROR_OFFSET: u32 = (MINT_ERROR_OFFSET - 1) - u8::MAX as u32; // 64768..=65023 - -/// Contract header errors will have this value added to them when being converted to a `u32`. -const AUCTION_ERROR_OFFSET: u32 = (HEADER_ERROR_OFFSET - 1) - u8::MAX as u32; // 64512..=64767 - -/// Minimum value of user error's inclusive range. -const USER_ERROR_MIN: u32 = RESERVED_ERROR_MAX + 1; - -/// Maximum value of user error's inclusive range. -const USER_ERROR_MAX: u32 = 2 * RESERVED_ERROR_MAX + 1; - -/// Minimum value of Mint error's inclusive range. -const MINT_ERROR_MIN: u32 = MINT_ERROR_OFFSET; - -/// Maximum value of Mint error's inclusive range. -const MINT_ERROR_MAX: u32 = POS_ERROR_OFFSET - 1; - -/// Minimum value of Handle Payment error's inclusive range. -const HP_ERROR_MIN: u32 = POS_ERROR_OFFSET; - -/// Maximum value of Handle Payment error's inclusive range. -const HP_ERROR_MAX: u32 = RESERVED_ERROR_MAX; - -/// Minimum value of contract header error's inclusive range. 
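
Editor's note: the offset constants above and below carve the reserved error space into adjacent 256-value bands. The following self-contained check simply replays the arithmetic from the deleted definitions:

    fn main() {
        let reserved_error_max: u32 = u16::MAX as u32;                        // 65_535
        let pos_error_offset = reserved_error_max - u8::MAX as u32;           // 65_280
        let mint_error_offset = (pos_error_offset - 1) - u8::MAX as u32;      // 65_024
        let header_error_offset = (mint_error_offset - 1) - u8::MAX as u32;   // 64_768
        let auction_error_offset = (header_error_offset - 1) - u8::MAX as u32; // 64_512

        assert_eq!(pos_error_offset, 65_280);
        assert_eq!(mint_error_offset, 65_024);
        assert_eq!(header_error_offset, 64_768);
        assert_eq!(auction_error_offset, 64_512);
        // User errors occupy [65_536, 131_071].
        assert_eq!(reserved_error_max + 1, 65_536);
        assert_eq!(2 * reserved_error_max + 1, 131_071);
    }
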
-const HEADER_ERROR_MIN: u32 = HEADER_ERROR_OFFSET; - -/// Maximum value of contract header error's inclusive range. -const HEADER_ERROR_MAX: u32 = HEADER_ERROR_OFFSET + u8::MAX as u32; - -/// Minimum value of an auction contract error's inclusive range. -const AUCTION_ERROR_MIN: u32 = AUCTION_ERROR_OFFSET; - -/// Maximum value of an auction contract error's inclusive range. -const AUCTION_ERROR_MAX: u32 = AUCTION_ERROR_OFFSET + u8::MAX as u32; - -/// Errors which can be encountered while running a smart contract. -/// -/// An `ApiError` can be converted to a `u32` in order to be passed via the execution engine's -/// `ext_ffi::casper_revert()` function. This means the information each variant can convey is -/// limited. -/// -/// The variants are split into numeric ranges as follows: -/// -/// | Inclusive range | Variant(s) | -/// | ----------------| ----------------------------------------------------------------| -/// | [1, 64511] | all except reserved system contract error ranges defined below. | -/// | [64512, 64767] | `Auction` | -/// | [64768, 65023] | `ContractHeader` | -/// | [65024, 65279] | `Mint` | -/// | [65280, 65535] | `HandlePayment` | -/// | [65536, 131071] | `User` | -/// -/// Users can specify a C-style enum and implement `From` to ease usage of -/// `casper_contract::runtime::revert()`, e.g. -/// ``` -/// use casper_types_ver_2_0::ApiError; -/// -/// #[repr(u16)] -/// enum FailureCode { -/// Zero = 0, // 65,536 as an ApiError::User -/// One, // 65,537 as an ApiError::User -/// Two // 65,538 as an ApiError::User -/// } -/// -/// impl From for ApiError { -/// fn from(code: FailureCode) -> Self { -/// ApiError::User(code as u16) -/// } -/// } -/// -/// assert_eq!(ApiError::User(1), FailureCode::One.into()); -/// assert_eq!(65_536, u32::from(ApiError::from(FailureCode::Zero))); -/// assert_eq!(65_538, u32::from(ApiError::from(FailureCode::Two))); -/// ``` -#[derive(Copy, Clone, PartialEq, Eq)] -#[non_exhaustive] -pub enum ApiError { - /// Optional data was unexpectedly `None`. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(1), ApiError::None); - /// ``` - None, - /// Specified argument not provided. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(2), ApiError::MissingArgument); - /// ``` - MissingArgument, - /// Argument not of correct type. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(3), ApiError::InvalidArgument); - /// ``` - InvalidArgument, - /// Failed to deserialize a value. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(4), ApiError::Deserialize); - /// ``` - Deserialize, - /// `casper_contract::storage::read()` returned an error. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(5), ApiError::Read); - /// ``` - Read, - /// The given key returned a `None` value. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(6), ApiError::ValueNotFound); - /// ``` - ValueNotFound, - /// Failed to find a specified contract. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(7), ApiError::ContractNotFound); - /// ``` - ContractNotFound, - /// A call to `casper_contract::runtime::get_key()` returned a failure. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(8), ApiError::GetKey); - /// ``` - GetKey, - /// The [`Key`](crate::Key) variant was not as expected. 
- /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(9), ApiError::UnexpectedKeyVariant); - /// ``` - UnexpectedKeyVariant, - /// Obsolete error variant (we no longer have ContractRef). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(10), ApiError::UnexpectedContractRefVariant); - /// ``` - UnexpectedContractRefVariant, // TODO: this variant is not used any longer and can be removed - /// Invalid purse name given. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(11), ApiError::InvalidPurseName); - /// ``` - InvalidPurseName, - /// Invalid purse retrieved. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(12), ApiError::InvalidPurse); - /// ``` - InvalidPurse, - /// Failed to upgrade contract at [`URef`](crate::URef). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(13), ApiError::UpgradeContractAtURef); - /// ``` - UpgradeContractAtURef, - /// Failed to transfer motes. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(14), ApiError::Transfer); - /// ``` - Transfer, - /// The given [`URef`](crate::URef) has no access rights. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(15), ApiError::NoAccessRights); - /// ``` - NoAccessRights, - /// A given type could not be constructed from a [`CLValue`](crate::CLValue). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(16), ApiError::CLTypeMismatch); - /// ``` - CLTypeMismatch, - /// Early end of stream while deserializing. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(17), ApiError::EarlyEndOfStream); - /// ``` - EarlyEndOfStream, - /// Formatting error while deserializing. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(18), ApiError::Formatting); - /// ``` - Formatting, - /// Not all input bytes were consumed in [`deserialize`](crate::bytesrepr::deserialize). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(19), ApiError::LeftOverBytes); - /// ``` - LeftOverBytes, - /// Out of memory error. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(20), ApiError::OutOfMemory); - /// ``` - OutOfMemory, - /// There are already maximum [`AccountHash`](crate::account::AccountHash)s associated with the - /// given account. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(21), ApiError::MaxKeysLimit); - /// ``` - MaxKeysLimit, - /// The given [`AccountHash`](crate::account::AccountHash) is already associated with the given - /// account. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(22), ApiError::DuplicateKey); - /// ``` - DuplicateKey, - /// Caller doesn't have sufficient permissions to perform the given action. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(23), ApiError::PermissionDenied); - /// ``` - PermissionDenied, - /// The given [`AccountHash`](crate::account::AccountHash) is not associated with the given - /// account. 
- /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(24), ApiError::MissingKey); - /// ``` - MissingKey, - /// Removing/updating the given associated [`AccountHash`](crate::account::AccountHash) would - /// cause the total [`Weight`](addressable_entity::Weight) of all remaining `AccountHash`s to - /// fall below one of the action thresholds for the given account. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(25), ApiError::ThresholdViolation); - /// ``` - ThresholdViolation, - /// Setting the key-management threshold to a value lower than the deployment threshold is - /// disallowed. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(26), ApiError::KeyManagementThreshold); - /// ``` - KeyManagementThreshold, - /// Setting the deployment threshold to a value greater than any other threshold is disallowed. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(27), ApiError::DeploymentThreshold); - /// ``` - DeploymentThreshold, - /// Setting a threshold to a value greater than the total weight of associated keys is - /// disallowed. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(28), ApiError::InsufficientTotalWeight); - /// ``` - InsufficientTotalWeight, - /// The given `u32` doesn't map to a [`SystemContractType`](crate::system::SystemEntityType). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(29), ApiError::InvalidSystemContract); - /// ``` - InvalidSystemContract, - /// Failed to create a new purse. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(30), ApiError::PurseNotCreated); - /// ``` - PurseNotCreated, - /// An unhandled value, likely representing a bug in the code. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(31), ApiError::Unhandled); - /// ``` - Unhandled, - /// The provided buffer is too small to complete an operation. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(32), ApiError::BufferTooSmall); - /// ``` - BufferTooSmall, - /// No data available in the host buffer. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(33), ApiError::HostBufferEmpty); - /// ``` - HostBufferEmpty, - /// The host buffer has been set to a value and should be consumed first by a read operation. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(34), ApiError::HostBufferFull); - /// ``` - HostBufferFull, - /// Could not lay out an array in memory - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(35), ApiError::AllocLayout); - /// ``` - AllocLayout, - /// The `dictionary_item_key` length exceeds the maximum length. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(36), ApiError::DictionaryItemKeyExceedsLength); - /// ``` - DictionaryItemKeyExceedsLength, - /// The `dictionary_item_key` is invalid. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(37), ApiError::InvalidDictionaryItemKey); - /// ``` - InvalidDictionaryItemKey, - /// Unable to retrieve the requested system contract hash. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(38), ApiError::MissingSystemContractHash); - /// ``` - MissingSystemContractHash, - /// Exceeded a recursion depth limit. 
- /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(39), ApiError::ExceededRecursionDepth); - /// ``` - ExceededRecursionDepth, - /// Attempt to serialize a value that does not have a serialized representation. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(40), ApiError::NonRepresentableSerialization); - /// ``` - NonRepresentableSerialization, - /// Error specific to Auction contract. See - /// [casper_types_ver_2_0::system::auction::Error](crate::system::auction::Error). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// for code in 64512..=64767 { - /// assert!(matches!(ApiError::from(code), ApiError::AuctionError(_auction_error))); - /// } - /// ``` - AuctionError(u8), - /// Contract header errors. See - /// [casper_types_ver_2_0::contracts::Error](crate::addressable_entity::Error). - /// - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// for code in 64768..=65023 { - /// assert!(matches!(ApiError::from(code), ApiError::ContractHeader(_contract_header_error))); - /// } - /// ``` - ContractHeader(u8), - /// Error specific to Mint contract. See - /// [casper_types_ver_2_0::system::mint::Error](crate::system::mint::Error). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// for code in 65024..=65279 { - /// assert!(matches!(ApiError::from(code), ApiError::Mint(_mint_error))); - /// } - /// ``` - Mint(u8), - /// Error specific to Handle Payment contract. See - /// [casper_types_ver_2_0::system::handle_payment](crate::system::handle_payment::Error). - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// for code in 65280..=65535 { - /// assert!(matches!(ApiError::from(code), ApiError::HandlePayment(_handle_payment_error))); - /// } - /// ``` - HandlePayment(u8), - /// User-specified error code. The internal `u16` value is added to `u16::MAX as u32 + 1` when - /// an `Error::User` is converted to a `u32`. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// for code in 65536..131071 { - /// assert!(matches!(ApiError::from(code), ApiError::User(_))); - /// } - /// ``` - User(u16), - /// The message topic is already registered. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(41), ApiError::MessageTopicAlreadyRegistered); - /// ``` - MessageTopicAlreadyRegistered, - /// The maximum number of allowed message topics was exceeded. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(42), ApiError::MaxTopicsNumberExceeded); - /// ``` - MaxTopicsNumberExceeded, - /// The maximum size for the topic name was exceeded. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(43), ApiError::MaxTopicNameSizeExceeded); - /// ``` - MaxTopicNameSizeExceeded, - /// The message topic is not registered. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(44), ApiError::MessageTopicNotRegistered); - /// ``` - MessageTopicNotRegistered, - /// The message topic is full and cannot accept new messages. - /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(45), ApiError::MessageTopicFull); - /// ``` - MessageTopicFull, - /// The message topic is full and cannot accept new messages. 
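
Editor's note: to make the numeric bands concrete, here is a round-trip sketch of the `u32` conversions defined further down in this file. The values are taken from the doc examples and tests in the deleted code; the only assumption is that the replacement `casper-types` crate keeps the same `ApiError` surface.

    use casper_types::ApiError;

    fn main() {
        // System-contract and user errors map into fixed u32 bands
        // (see the `From<ApiError> for u32` impl below).
        assert_eq!(u32::from(ApiError::Mint(0)), 65_024);
        assert_eq!(u32::from(ApiError::HandlePayment(u8::MAX)), 65_535);
        assert_eq!(u32::from(ApiError::User(2)), 65_538);
        // The mapping is reversible for values inside the defined ranges.
        assert_eq!(ApiError::from(65_538u32), ApiError::User(2));
    }
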
- /// ``` - /// # use casper_types_ver_2_0::ApiError; - /// assert_eq!(ApiError::from(46), ApiError::MessageTooLarge); - /// ``` - MessageTooLarge, -} - -impl From for ApiError { - fn from(error: bytesrepr::Error) -> Self { - match error { - bytesrepr::Error::EarlyEndOfStream => ApiError::EarlyEndOfStream, - bytesrepr::Error::Formatting => ApiError::Formatting, - bytesrepr::Error::LeftOverBytes => ApiError::LeftOverBytes, - bytesrepr::Error::OutOfMemory => ApiError::OutOfMemory, - bytesrepr::Error::NotRepresentable => ApiError::NonRepresentableSerialization, - bytesrepr::Error::ExceededRecursionDepth => ApiError::ExceededRecursionDepth, - } - } -} - -impl From for ApiError { - fn from(error: AddKeyFailure) -> Self { - match error { - AddKeyFailure::MaxKeysLimit => ApiError::MaxKeysLimit, - AddKeyFailure::DuplicateKey => ApiError::DuplicateKey, - AddKeyFailure::PermissionDenied => ApiError::PermissionDenied, - } - } -} - -impl From for ApiError { - fn from(error: UpdateKeyFailure) -> Self { - match error { - UpdateKeyFailure::MissingKey => ApiError::MissingKey, - UpdateKeyFailure::PermissionDenied => ApiError::PermissionDenied, - UpdateKeyFailure::ThresholdViolation => ApiError::ThresholdViolation, - } - } -} - -impl From for ApiError { - fn from(error: RemoveKeyFailure) -> Self { - match error { - RemoveKeyFailure::MissingKey => ApiError::MissingKey, - RemoveKeyFailure::PermissionDenied => ApiError::PermissionDenied, - RemoveKeyFailure::ThresholdViolation => ApiError::ThresholdViolation, - } - } -} - -impl From for ApiError { - fn from(error: SetThresholdFailure) -> Self { - match error { - SetThresholdFailure::KeyManagementThreshold => ApiError::KeyManagementThreshold, - SetThresholdFailure::DeploymentThreshold => ApiError::DeploymentThreshold, - SetThresholdFailure::PermissionDeniedError => ApiError::PermissionDenied, - SetThresholdFailure::InsufficientTotalWeight => ApiError::InsufficientTotalWeight, - } - } -} - -impl From for ApiError { - fn from(error: CLValueError) -> Self { - match error { - CLValueError::Serialization(bytesrepr_error) => bytesrepr_error.into(), - CLValueError::Type(_) => ApiError::CLTypeMismatch, - } - } -} - -impl From for ApiError { - fn from(error: addressable_entity::Error) -> Self { - ApiError::ContractHeader(error as u8) - } -} - -impl From for ApiError { - fn from(error: auction::Error) -> Self { - ApiError::AuctionError(error as u8) - } -} - -// This conversion is not intended to be used by third party crates. 
-#[doc(hidden)] -impl From for ApiError { - fn from(_error: TryFromIntError) -> Self { - ApiError::Unhandled - } -} - -impl From for ApiError { - fn from(_error: TryFromSliceForAccountHashError) -> Self { - ApiError::Deserialize - } -} - -impl From for ApiError { - fn from(error: mint::Error) -> Self { - ApiError::Mint(error as u8) - } -} - -impl From for ApiError { - fn from(error: handle_payment::Error) -> Self { - ApiError::HandlePayment(error as u8) - } -} - -impl From for ApiError { - fn from(error: MessageTopicError) -> Self { - match error { - MessageTopicError::DuplicateTopic => ApiError::MessageTopicAlreadyRegistered, - MessageTopicError::MaxTopicsExceeded => ApiError::MaxTopicsNumberExceeded, - MessageTopicError::TopicNameSizeExceeded => ApiError::MaxTopicNameSizeExceeded, - } - } -} - -impl From for u32 { - fn from(error: ApiError) -> Self { - match error { - ApiError::None => 1, - ApiError::MissingArgument => 2, - ApiError::InvalidArgument => 3, - ApiError::Deserialize => 4, - ApiError::Read => 5, - ApiError::ValueNotFound => 6, - ApiError::ContractNotFound => 7, - ApiError::GetKey => 8, - ApiError::UnexpectedKeyVariant => 9, - ApiError::UnexpectedContractRefVariant => 10, - ApiError::InvalidPurseName => 11, - ApiError::InvalidPurse => 12, - ApiError::UpgradeContractAtURef => 13, - ApiError::Transfer => 14, - ApiError::NoAccessRights => 15, - ApiError::CLTypeMismatch => 16, - ApiError::EarlyEndOfStream => 17, - ApiError::Formatting => 18, - ApiError::LeftOverBytes => 19, - ApiError::OutOfMemory => 20, - ApiError::MaxKeysLimit => 21, - ApiError::DuplicateKey => 22, - ApiError::PermissionDenied => 23, - ApiError::MissingKey => 24, - ApiError::ThresholdViolation => 25, - ApiError::KeyManagementThreshold => 26, - ApiError::DeploymentThreshold => 27, - ApiError::InsufficientTotalWeight => 28, - ApiError::InvalidSystemContract => 29, - ApiError::PurseNotCreated => 30, - ApiError::Unhandled => 31, - ApiError::BufferTooSmall => 32, - ApiError::HostBufferEmpty => 33, - ApiError::HostBufferFull => 34, - ApiError::AllocLayout => 35, - ApiError::DictionaryItemKeyExceedsLength => 36, - ApiError::InvalidDictionaryItemKey => 37, - ApiError::MissingSystemContractHash => 38, - ApiError::ExceededRecursionDepth => 39, - ApiError::NonRepresentableSerialization => 40, - ApiError::MessageTopicAlreadyRegistered => 41, - ApiError::MaxTopicsNumberExceeded => 42, - ApiError::MaxTopicNameSizeExceeded => 43, - ApiError::MessageTopicNotRegistered => 44, - ApiError::MessageTopicFull => 45, - ApiError::MessageTooLarge => 46, - ApiError::AuctionError(value) => AUCTION_ERROR_OFFSET + u32::from(value), - ApiError::ContractHeader(value) => HEADER_ERROR_OFFSET + u32::from(value), - ApiError::Mint(value) => MINT_ERROR_OFFSET + u32::from(value), - ApiError::HandlePayment(value) => POS_ERROR_OFFSET + u32::from(value), - ApiError::User(value) => RESERVED_ERROR_MAX + 1 + u32::from(value), - } - } -} - -impl From for ApiError { - fn from(value: u32) -> ApiError { - match value { - 1 => ApiError::None, - 2 => ApiError::MissingArgument, - 3 => ApiError::InvalidArgument, - 4 => ApiError::Deserialize, - 5 => ApiError::Read, - 6 => ApiError::ValueNotFound, - 7 => ApiError::ContractNotFound, - 8 => ApiError::GetKey, - 9 => ApiError::UnexpectedKeyVariant, - 10 => ApiError::UnexpectedContractRefVariant, - 11 => ApiError::InvalidPurseName, - 12 => ApiError::InvalidPurse, - 13 => ApiError::UpgradeContractAtURef, - 14 => ApiError::Transfer, - 15 => ApiError::NoAccessRights, - 16 => ApiError::CLTypeMismatch, - 17 => 
ApiError::EarlyEndOfStream, - 18 => ApiError::Formatting, - 19 => ApiError::LeftOverBytes, - 20 => ApiError::OutOfMemory, - 21 => ApiError::MaxKeysLimit, - 22 => ApiError::DuplicateKey, - 23 => ApiError::PermissionDenied, - 24 => ApiError::MissingKey, - 25 => ApiError::ThresholdViolation, - 26 => ApiError::KeyManagementThreshold, - 27 => ApiError::DeploymentThreshold, - 28 => ApiError::InsufficientTotalWeight, - 29 => ApiError::InvalidSystemContract, - 30 => ApiError::PurseNotCreated, - 31 => ApiError::Unhandled, - 32 => ApiError::BufferTooSmall, - 33 => ApiError::HostBufferEmpty, - 34 => ApiError::HostBufferFull, - 35 => ApiError::AllocLayout, - 36 => ApiError::DictionaryItemKeyExceedsLength, - 37 => ApiError::InvalidDictionaryItemKey, - 38 => ApiError::MissingSystemContractHash, - 39 => ApiError::ExceededRecursionDepth, - 40 => ApiError::NonRepresentableSerialization, - 41 => ApiError::MessageTopicAlreadyRegistered, - 42 => ApiError::MaxTopicsNumberExceeded, - 43 => ApiError::MaxTopicNameSizeExceeded, - 44 => ApiError::MessageTopicNotRegistered, - 45 => ApiError::MessageTopicFull, - 46 => ApiError::MessageTooLarge, - USER_ERROR_MIN..=USER_ERROR_MAX => ApiError::User(value as u16), - HP_ERROR_MIN..=HP_ERROR_MAX => ApiError::HandlePayment(value as u8), - MINT_ERROR_MIN..=MINT_ERROR_MAX => ApiError::Mint(value as u8), - HEADER_ERROR_MIN..=HEADER_ERROR_MAX => ApiError::ContractHeader(value as u8), - AUCTION_ERROR_MIN..=AUCTION_ERROR_MAX => ApiError::AuctionError(value as u8), - _ => ApiError::Unhandled, - } - } -} - -impl Debug for ApiError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - ApiError::None => write!(f, "ApiError::None")?, - ApiError::MissingArgument => write!(f, "ApiError::MissingArgument")?, - ApiError::InvalidArgument => write!(f, "ApiError::InvalidArgument")?, - ApiError::Deserialize => write!(f, "ApiError::Deserialize")?, - ApiError::Read => write!(f, "ApiError::Read")?, - ApiError::ValueNotFound => write!(f, "ApiError::ValueNotFound")?, - ApiError::ContractNotFound => write!(f, "ApiError::ContractNotFound")?, - ApiError::GetKey => write!(f, "ApiError::GetKey")?, - ApiError::UnexpectedKeyVariant => write!(f, "ApiError::UnexpectedKeyVariant")?, - ApiError::UnexpectedContractRefVariant => { - write!(f, "ApiError::UnexpectedContractRefVariant")? 
- } - ApiError::InvalidPurseName => write!(f, "ApiError::InvalidPurseName")?, - ApiError::InvalidPurse => write!(f, "ApiError::InvalidPurse")?, - ApiError::UpgradeContractAtURef => write!(f, "ApiError::UpgradeContractAtURef")?, - ApiError::Transfer => write!(f, "ApiError::Transfer")?, - ApiError::NoAccessRights => write!(f, "ApiError::NoAccessRights")?, - ApiError::CLTypeMismatch => write!(f, "ApiError::CLTypeMismatch")?, - ApiError::EarlyEndOfStream => write!(f, "ApiError::EarlyEndOfStream")?, - ApiError::Formatting => write!(f, "ApiError::Formatting")?, - ApiError::LeftOverBytes => write!(f, "ApiError::LeftOverBytes")?, - ApiError::OutOfMemory => write!(f, "ApiError::OutOfMemory")?, - ApiError::MaxKeysLimit => write!(f, "ApiError::MaxKeysLimit")?, - ApiError::DuplicateKey => write!(f, "ApiError::DuplicateKey")?, - ApiError::PermissionDenied => write!(f, "ApiError::PermissionDenied")?, - ApiError::MissingKey => write!(f, "ApiError::MissingKey")?, - ApiError::ThresholdViolation => write!(f, "ApiError::ThresholdViolation")?, - ApiError::KeyManagementThreshold => write!(f, "ApiError::KeyManagementThreshold")?, - ApiError::DeploymentThreshold => write!(f, "ApiError::DeploymentThreshold")?, - ApiError::InsufficientTotalWeight => write!(f, "ApiError::InsufficientTotalWeight")?, - ApiError::InvalidSystemContract => write!(f, "ApiError::InvalidSystemContract")?, - ApiError::PurseNotCreated => write!(f, "ApiError::PurseNotCreated")?, - ApiError::Unhandled => write!(f, "ApiError::Unhandled")?, - ApiError::BufferTooSmall => write!(f, "ApiError::BufferTooSmall")?, - ApiError::HostBufferEmpty => write!(f, "ApiError::HostBufferEmpty")?, - ApiError::HostBufferFull => write!(f, "ApiError::HostBufferFull")?, - ApiError::AllocLayout => write!(f, "ApiError::AllocLayout")?, - ApiError::DictionaryItemKeyExceedsLength => { - write!(f, "ApiError::DictionaryItemKeyTooLarge")? - } - ApiError::InvalidDictionaryItemKey => write!(f, "ApiError::InvalidDictionaryItemKey")?, - ApiError::MissingSystemContractHash => write!(f, "ApiError::MissingContractHash")?, - ApiError::NonRepresentableSerialization => { - write!(f, "ApiError::NonRepresentableSerialization")? - } - ApiError::MessageTopicAlreadyRegistered => { - write!(f, "ApiError::MessageTopicAlreadyRegistered")? - } - ApiError::MaxTopicsNumberExceeded => write!(f, "ApiError::MaxTopicsNumberExceeded")?, - ApiError::MaxTopicNameSizeExceeded => write!(f, "ApiError::MaxTopicNameSizeExceeded")?, - ApiError::MessageTopicNotRegistered => { - write!(f, "ApiError::MessageTopicNotRegistered")? - } - ApiError::MessageTopicFull => write!(f, "ApiError::MessageTopicFull")?, - ApiError::MessageTooLarge => write!(f, "ApiError::MessageTooLarge")?, - ApiError::ExceededRecursionDepth => write!(f, "ApiError::ExceededRecursionDepth")?, - ApiError::AuctionError(value) => write!( - f, - "ApiError::AuctionError({:?})", - auction::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::ContractHeader(value) => write!( - f, - "ApiError::ContractHeader({:?})", - addressable_entity::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::Mint(value) => write!( - f, - "ApiError::Mint({:?})", - mint::Error::try_from(*value).map_err(|_err| fmt::Error)? - )?, - ApiError::HandlePayment(value) => write!( - f, - "ApiError::HandlePayment({:?})", - handle_payment::Error::try_from(*value).map_err(|_err| fmt::Error)? 
- )?, - ApiError::User(value) => write!(f, "ApiError::User({})", value)?, - } - write!(f, " [{}]", u32::from(*self)) - } -} - -impl fmt::Display for ApiError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ApiError::User(value) => write!(f, "User error: {}", value), - ApiError::ContractHeader(value) => write!(f, "Contract header error: {}", value), - ApiError::Mint(value) => write!(f, "Mint error: {}", value), - ApiError::HandlePayment(value) => write!(f, "Handle Payment error: {}", value), - _ => ::fmt(self, f), - } - } -} - -// This function is not intended to be used by third party crates. -#[doc(hidden)] -pub fn i32_from(result: Result<(), T>) -> i32 -where - ApiError: From, -{ - match result { - Ok(()) => 0, - Err(error) => { - let api_error = ApiError::from(error); - u32::from(api_error) as i32 - } - } -} - -/// Converts an `i32` to a `Result<(), ApiError>`, where `0` represents `Ok(())`, and all other -/// inputs are mapped to `Err(ApiError::)`. The full list of mappings can be found in the -/// [docs for `ApiError`](ApiError#mappings). -pub fn result_from(value: i32) -> Result<(), ApiError> { - match value { - 0 => Ok(()), - _ => Err(ApiError::from(value as u32)), - } -} - -#[cfg(test)] -mod tests { - use std::{i32, u16, u8}; - - use super::*; - - fn round_trip(result: Result<(), ApiError>) { - let code = i32_from(result); - assert_eq!(result, result_from(code)); - } - - #[test] - fn error_values() { - assert_eq!(65_024_u32, u32::from(ApiError::Mint(0))); // MINT_ERROR_OFFSET == 65,024 - assert_eq!(65_279_u32, u32::from(ApiError::Mint(u8::MAX))); - assert_eq!(65_280_u32, u32::from(ApiError::HandlePayment(0))); // POS_ERROR_OFFSET == 65,280 - assert_eq!(65_535_u32, u32::from(ApiError::HandlePayment(u8::MAX))); - assert_eq!(65_536_u32, u32::from(ApiError::User(0))); // u16::MAX + 1 - assert_eq!(131_071_u32, u32::from(ApiError::User(u16::MAX))); // 2 * u16::MAX + 1 - } - - #[test] - fn error_descriptions_getkey() { - assert_eq!("ApiError::GetKey [8]", &format!("{:?}", ApiError::GetKey)); - assert_eq!("ApiError::GetKey [8]", &format!("{}", ApiError::GetKey)); - } - - #[test] - fn error_descriptions_contract_header() { - assert_eq!( - "ApiError::ContractHeader(PreviouslyUsedVersion) [64769]", - &format!( - "{:?}", - ApiError::ContractHeader(addressable_entity::Error::PreviouslyUsedVersion as u8) - ) - ); - assert_eq!( - "Contract header error: 0", - &format!("{}", ApiError::ContractHeader(0)) - ); - assert_eq!( - "Contract header error: 255", - &format!("{}", ApiError::ContractHeader(u8::MAX)) - ); - } - - #[test] - fn error_descriptions_mint() { - assert_eq!( - "ApiError::Mint(InsufficientFunds) [65024]", - &format!("{:?}", ApiError::Mint(0)) - ); - assert_eq!("Mint error: 0", &format!("{}", ApiError::Mint(0))); - assert_eq!("Mint error: 255", &format!("{}", ApiError::Mint(u8::MAX))); - } - - #[test] - fn error_descriptions_handle_payment() { - assert_eq!( - "ApiError::HandlePayment(NotBonded) [65280]", - &format!( - "{:?}", - ApiError::HandlePayment(handle_payment::Error::NotBonded as u8) - ) - ); - } - #[test] - fn error_descriptions_handle_payment_display() { - assert_eq!( - "Handle Payment error: 0", - &format!( - "{}", - ApiError::HandlePayment(handle_payment::Error::NotBonded as u8) - ) - ); - } - - #[test] - fn error_descriptions_user_errors() { - assert_eq!( - "ApiError::User(0) [65536]", - &format!("{:?}", ApiError::User(0)) - ); - - assert_eq!("User error: 0", &format!("{}", ApiError::User(0))); - assert_eq!( - "ApiError::User(65535) [131071]", - 
&format!("{:?}", ApiError::User(u16::MAX)) - ); - assert_eq!( - "User error: 65535", - &format!("{}", ApiError::User(u16::MAX)) - ); - } - - #[test] - fn error_edge_cases() { - assert_eq!(Err(ApiError::Unhandled), result_from(i32::MAX)); - assert_eq!( - Err(ApiError::ContractHeader(255)), - result_from(MINT_ERROR_OFFSET as i32 - 1) - ); - assert_eq!(Err(ApiError::Unhandled), result_from(-1)); - assert_eq!(Err(ApiError::Unhandled), result_from(i32::MIN)); - } - - #[test] - fn error_round_trips() { - round_trip(Ok(())); - round_trip(Err(ApiError::None)); - round_trip(Err(ApiError::MissingArgument)); - round_trip(Err(ApiError::InvalidArgument)); - round_trip(Err(ApiError::Deserialize)); - round_trip(Err(ApiError::Read)); - round_trip(Err(ApiError::ValueNotFound)); - round_trip(Err(ApiError::ContractNotFound)); - round_trip(Err(ApiError::GetKey)); - round_trip(Err(ApiError::UnexpectedKeyVariant)); - round_trip(Err(ApiError::UnexpectedContractRefVariant)); - round_trip(Err(ApiError::InvalidPurseName)); - round_trip(Err(ApiError::InvalidPurse)); - round_trip(Err(ApiError::UpgradeContractAtURef)); - round_trip(Err(ApiError::Transfer)); - round_trip(Err(ApiError::NoAccessRights)); - round_trip(Err(ApiError::CLTypeMismatch)); - round_trip(Err(ApiError::EarlyEndOfStream)); - round_trip(Err(ApiError::Formatting)); - round_trip(Err(ApiError::LeftOverBytes)); - round_trip(Err(ApiError::OutOfMemory)); - round_trip(Err(ApiError::MaxKeysLimit)); - round_trip(Err(ApiError::DuplicateKey)); - round_trip(Err(ApiError::PermissionDenied)); - round_trip(Err(ApiError::MissingKey)); - round_trip(Err(ApiError::ThresholdViolation)); - round_trip(Err(ApiError::KeyManagementThreshold)); - round_trip(Err(ApiError::DeploymentThreshold)); - round_trip(Err(ApiError::InsufficientTotalWeight)); - round_trip(Err(ApiError::InvalidSystemContract)); - round_trip(Err(ApiError::PurseNotCreated)); - round_trip(Err(ApiError::Unhandled)); - round_trip(Err(ApiError::BufferTooSmall)); - round_trip(Err(ApiError::HostBufferEmpty)); - round_trip(Err(ApiError::HostBufferFull)); - round_trip(Err(ApiError::AllocLayout)); - round_trip(Err(ApiError::NonRepresentableSerialization)); - round_trip(Err(ApiError::ContractHeader(0))); - round_trip(Err(ApiError::ContractHeader(u8::MAX))); - round_trip(Err(ApiError::Mint(0))); - round_trip(Err(ApiError::Mint(u8::MAX))); - round_trip(Err(ApiError::HandlePayment(0))); - round_trip(Err(ApiError::HandlePayment(u8::MAX))); - round_trip(Err(ApiError::User(0))); - round_trip(Err(ApiError::User(u16::MAX))); - round_trip(Err(ApiError::AuctionError(0))); - round_trip(Err(ApiError::AuctionError(u8::MAX))); - round_trip(Err(ApiError::MessageTopicAlreadyRegistered)); - round_trip(Err(ApiError::MaxTopicsNumberExceeded)); - round_trip(Err(ApiError::MaxTopicNameSizeExceeded)); - round_trip(Err(ApiError::MessageTopicNotRegistered)); - round_trip(Err(ApiError::MessageTopicFull)); - round_trip(Err(ApiError::MessageTooLarge)); - } -} diff --git a/casper_types_ver_2_0/src/auction_state.rs b/casper_types_ver_2_0/src/auction_state.rs deleted file mode 100644 index 85fa32ef..00000000 --- a/casper_types_ver_2_0/src/auction_state.rs +++ /dev/null @@ -1,203 +0,0 @@ -use alloc::collections::{btree_map::Entry, BTreeMap}; - -use alloc::vec::Vec; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, 
KeyValueLabels}; - -use crate::{ - system::auction::{Bid, BidKind, EraValidators, Staking, ValidatorBid}, - Digest, EraId, PublicKey, U512, -}; - -#[cfg(feature = "json-schema")] -static ERA_VALIDATORS: Lazy = Lazy::new(|| { - use crate::SecretKey; - - let secret_key_1 = SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(); - let public_key_1 = PublicKey::from(&secret_key_1); - - let mut validator_weights = BTreeMap::new(); - validator_weights.insert(public_key_1, U512::from(10)); - - let mut era_validators = BTreeMap::new(); - era_validators.insert(EraId::from(10u64), validator_weights); - - era_validators -}); -#[cfg(feature = "json-schema")] -static AUCTION_INFO: Lazy = Lazy::new(|| { - use crate::{ - system::auction::{DelegationRate, Delegator}, - AccessRights, SecretKey, URef, - }; - use num_traits::Zero; - - let state_root_hash = Digest::from([11; Digest::LENGTH]); - let validator_secret_key = - SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(); - let validator_public_key = PublicKey::from(&validator_secret_key); - - let mut bids = vec![]; - let validator_bid = ValidatorBid::unlocked( - validator_public_key.clone(), - URef::new([250; 32], AccessRights::READ_ADD_WRITE), - U512::from(20), - DelegationRate::zero(), - ); - bids.push(BidKind::Validator(Box::new(validator_bid))); - - let delegator_secret_key = - SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(); - let delegator_public_key = PublicKey::from(&delegator_secret_key); - let delegator_bid = Delegator::unlocked( - delegator_public_key, - U512::from(10), - URef::new([251; 32], AccessRights::READ_ADD_WRITE), - validator_public_key, - ); - bids.push(BidKind::Delegator(Box::new(delegator_bid))); - - let height: u64 = 10; - let era_validators = ERA_VALIDATORS.clone(); - AuctionState::new(state_root_hash, height, era_validators, bids) -}); - -/// A validator's weight. -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct JsonValidatorWeights { - public_key: PublicKey, - weight: U512, -} - -/// The validators for the given era. -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct JsonEraValidators { - era_id: EraId, - validator_weights: Vec, -} - -/// Data structure summarizing auction contract data. -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct AuctionState { - /// Global state hash. - pub state_root_hash: Digest, - /// Block height. - pub block_height: u64, - /// Era validators. - pub era_validators: Vec, - /// All bids. 
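
Editor's note on the JSON shape of the `bids` field defined below: because it is serialized through `serde_map_to_array`, the map is emitted as an array of labelled `{public_key, bid}` entries rather than a JSON object keyed by public key. A self-contained sketch with simplified stand-in types (`BidLikeLabels`, `String`, and `u64` are illustrative only, not types from this PR):

    use std::collections::BTreeMap;

    use serde::Serialize;
    use serde_map_to_array::{BTreeMapToArray, KeyValueLabels};

    struct BidLikeLabels;

    impl KeyValueLabels for BidLikeLabels {
        const KEY: &'static str = "public_key";
        const VALUE: &'static str = "bid";
    }

    #[derive(Serialize)]
    struct Example {
        #[serde(with = "BTreeMapToArray::<String, u64, BidLikeLabels>")]
        bids: BTreeMap<String, u64>,
    }

    fn main() {
        let mut bids = BTreeMap::new();
        bids.insert("validator-1".to_string(), 100u64);
        let json = serde_json::to_string(&Example { bids }).unwrap();
        // Expected shape (labels come from `KeyValueLabels`):
        //   {"bids":[{"public_key":"validator-1","bid":100}]}
        println!("{json}");
    }
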
- #[serde(with = "BTreeMapToArray::")] - bids: BTreeMap, -} - -impl AuctionState { - /// Create new instance of `AuctionState` - pub fn new( - state_root_hash: Digest, - block_height: u64, - era_validators: EraValidators, - bids: Vec, - ) -> Self { - let mut json_era_validators: Vec = Vec::new(); - for (era_id, validator_weights) in era_validators.iter() { - let mut json_validator_weights: Vec = Vec::new(); - for (public_key, weight) in validator_weights.iter() { - json_validator_weights.push(JsonValidatorWeights { - public_key: public_key.clone(), - weight: *weight, - }); - } - json_era_validators.push(JsonEraValidators { - era_id: *era_id, - validator_weights: json_validator_weights, - }); - } - - let staking = { - let mut staking: Staking = BTreeMap::new(); - for bid_kind in bids.iter().filter(|x| x.is_unified()) { - if let BidKind::Unified(bid) = bid_kind { - let public_key = bid.validator_public_key().clone(); - let validator_bid = ValidatorBid::unlocked( - bid.validator_public_key().clone(), - *bid.bonding_purse(), - *bid.staked_amount(), - *bid.delegation_rate(), - ); - staking.insert(public_key, (validator_bid, bid.delegators().clone())); - } - } - - for bid_kind in bids.iter().filter(|x| x.is_validator()) { - if let BidKind::Validator(validator_bid) = bid_kind { - let public_key = validator_bid.validator_public_key().clone(); - staking.insert(public_key, (*validator_bid.clone(), BTreeMap::new())); - } - } - - for bid_kind in bids.iter().filter(|x| x.is_delegator()) { - if let BidKind::Delegator(delegator_bid) = bid_kind { - let validator_public_key = delegator_bid.validator_public_key().clone(); - if let Entry::Occupied(mut occupant) = - staking.entry(validator_public_key.clone()) - { - let (_, delegators) = occupant.get_mut(); - delegators.insert( - delegator_bid.delegator_public_key().clone(), - *delegator_bid.clone(), - ); - } - } - } - staking - }; - - let mut bids: BTreeMap = BTreeMap::new(); - for (public_key, (validator_bid, delegators)) in staking { - let bid = Bid::from_non_unified(validator_bid, delegators); - bids.insert(public_key, bid); - } - - AuctionState { - state_root_hash, - block_height, - era_validators: json_era_validators, - bids, - } - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &AUCTION_INFO - } -} - -struct BidLabels; - -impl KeyValueLabels for BidLabels { - const KEY: &'static str = "public_key"; - const VALUE: &'static str = "bid"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for BidLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("PublicKeyAndBid"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = - Some("A bid associated with the given public key."); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = Some("The public key of the bidder."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The bid details."); -} diff --git a/casper_types_ver_2_0/src/binary_port.rs b/casper_types_ver_2_0/src/binary_port.rs deleted file mode 100644 index 42fc4a9f..00000000 --- a/casper_types_ver_2_0/src/binary_port.rs +++ /dev/null @@ -1,66 +0,0 @@ -//! The binary port. 
-mod binary_request; -mod binary_response; -mod binary_response_and_request; -mod binary_response_header; -mod error_code; -mod get_all_values_result; -mod get_request; -mod global_state_query_result; -mod information_request; -mod minimal_block_info; -#[cfg(any(feature = "std", test))] -mod node_status; -mod payload_type; -mod record_id; -mod state_request; -mod type_wrappers; - -pub use binary_request::{BinaryRequest, BinaryRequestHeader, BinaryRequestTag}; -pub use binary_response::BinaryResponse; -pub use binary_response_and_request::BinaryResponseAndRequest; -pub use binary_response_header::BinaryResponseHeader; -pub use error_code::ErrorCode; -pub use get_all_values_result::GetAllValuesResult; -pub use get_request::GetRequest; -pub use global_state_query_result::GlobalStateQueryResult; -pub use information_request::{InformationRequest, InformationRequestTag}; -#[cfg(any(feature = "std", test))] -pub use minimal_block_info::MinimalBlockInfo; -#[cfg(any(feature = "std", test))] -pub use node_status::NodeStatus; -pub use payload_type::{PayloadEntity, PayloadType}; -pub use record_id::RecordId; -pub use state_request::GlobalStateRequest; -pub use type_wrappers::{ - ConsensusStatus, ConsensusValidatorChanges, GetTrieFullResult, LastProgress, NetworkName, - SpeculativeExecutionResult, TransactionWithExecutionInfo, Uptime, -}; - -use alloc::vec::Vec; - -/// Stores raw bytes from the DB along with the flag indicating whether data come from legacy or -/// current version of the DB. -#[derive(Debug)] -pub struct DbRawBytesSpec { - is_legacy: bool, - raw_bytes: Vec, -} - -impl DbRawBytesSpec { - /// Creates a variant indicating that raw bytes are coming from the legacy database. - pub fn new_legacy(raw_bytes: &[u8]) -> Self { - Self { - is_legacy: true, - raw_bytes: raw_bytes.to_vec(), - } - } - - /// Creates a variant indicating that raw bytes are coming from the current database. - pub fn new_current(raw_bytes: &[u8]) -> Self { - Self { - is_legacy: false, - raw_bytes: raw_bytes.to_vec(), - } - } -} diff --git a/casper_types_ver_2_0/src/binary_port/binary_request.rs b/casper_types_ver_2_0/src/binary_port/binary_request.rs deleted file mode 100644 index a123a80c..00000000 --- a/casper_types_ver_2_0/src/binary_port/binary_request.rs +++ /dev/null @@ -1,297 +0,0 @@ -use core::convert::TryFrom; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - BlockHeader, Digest, ProtocolVersion, Timestamp, Transaction, -}; -use alloc::vec::Vec; - -use super::get_request::GetRequest; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::{testing::TestRng, Block, TestBlockV1Builder}; - -/// The header of a binary request. -#[derive(Debug, PartialEq)] -pub struct BinaryRequestHeader { - protocol_version: ProtocolVersion, - type_tag: u8, -} - -impl BinaryRequestHeader { - /// Creates new binary request header. - pub fn new(protocol_version: ProtocolVersion, type_tag: BinaryRequestTag) -> Self { - Self { - protocol_version, - type_tag: type_tag.into(), - } - } - - /// Returns the protocol version of the request. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Returns the type tag of the request. 
- pub fn type_tag(&self) -> u8 { - self.type_tag - } -} - -impl ToBytes for BinaryRequestHeader { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.protocol_version.write_bytes(writer)?; - self.type_tag.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.protocol_version.serialized_length() + self.type_tag.serialized_length() - } -} - -impl FromBytes for BinaryRequestHeader { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (protocol_version, remainder) = FromBytes::from_bytes(bytes)?; - let (type_tag, remainder) = u8::from_bytes(remainder)?; - Ok(( - BinaryRequestHeader { - protocol_version, - type_tag, - }, - remainder, - )) - } -} - -/// A request to the binary access interface. -#[derive(Debug, PartialEq)] -pub enum BinaryRequest { - /// Request to get data from the node - Get(GetRequest), - /// Request to add a transaction into a blockchain. - TryAcceptTransaction { - /// Transaction to be handled. - transaction: Transaction, - }, - /// Request to execute a transaction speculatively. - TrySpeculativeExec { - /// State root on top of which to execute deploy. - state_root_hash: Digest, - /// Block time. - block_time: Timestamp, - /// Protocol version used when creating the original block. - protocol_version: ProtocolVersion, - /// Transaction to execute. - transaction: Transaction, - /// Block header of block at which we should perform speculative execution. - speculative_exec_at_block: BlockHeader, - }, -} - -impl BinaryRequest { - /// Returns the type tag of the request. - pub fn tag(&self) -> BinaryRequestTag { - match self { - BinaryRequest::Get(_) => BinaryRequestTag::Get, - BinaryRequest::TryAcceptTransaction { .. } => BinaryRequestTag::TryAcceptTransaction, - BinaryRequest::TrySpeculativeExec { .. 
} => BinaryRequestTag::TrySpeculativeExec, - } - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match BinaryRequestTag::random(rng) { - BinaryRequestTag::Get => Self::Get(GetRequest::random(rng)), - BinaryRequestTag::TryAcceptTransaction => Self::TryAcceptTransaction { - transaction: Transaction::random(rng), - }, - BinaryRequestTag::TrySpeculativeExec => { - let block_v1 = TestBlockV1Builder::new().build(rng); - let block = Block::V1(block_v1); - - Self::TrySpeculativeExec { - state_root_hash: Digest::random(rng), - block_time: Timestamp::random(rng), - protocol_version: ProtocolVersion::from_parts(rng.gen(), rng.gen(), rng.gen()), - transaction: Transaction::random(rng), - speculative_exec_at_block: block.take_header(), - } - } - } - } -} - -impl ToBytes for BinaryRequest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - BinaryRequest::Get(inner) => inner.write_bytes(writer), - BinaryRequest::TryAcceptTransaction { transaction } => transaction.write_bytes(writer), - BinaryRequest::TrySpeculativeExec { - transaction, - state_root_hash, - block_time, - protocol_version, - speculative_exec_at_block, - } => { - transaction.write_bytes(writer)?; - state_root_hash.write_bytes(writer)?; - block_time.write_bytes(writer)?; - protocol_version.write_bytes(writer)?; - speculative_exec_at_block.write_bytes(writer) - } - } - } - - fn serialized_length(&self) -> usize { - match self { - BinaryRequest::Get(inner) => inner.serialized_length(), - BinaryRequest::TryAcceptTransaction { transaction } => transaction.serialized_length(), - BinaryRequest::TrySpeculativeExec { - transaction, - state_root_hash, - block_time, - protocol_version, - speculative_exec_at_block, - } => { - transaction.serialized_length() - + state_root_hash.serialized_length() - + block_time.serialized_length() - + protocol_version.serialized_length() - + speculative_exec_at_block.serialized_length() - } - } - } -} - -impl TryFrom<(BinaryRequestTag, &[u8])> for BinaryRequest { - type Error = bytesrepr::Error; - - fn try_from((tag, bytes): (BinaryRequestTag, &[u8])) -> Result { - let (req, remainder) = match tag { - BinaryRequestTag::Get => { - let (get_request, remainder) = FromBytes::from_bytes(bytes)?; - (BinaryRequest::Get(get_request), remainder) - } - BinaryRequestTag::TryAcceptTransaction => { - let (transaction, remainder) = FromBytes::from_bytes(bytes)?; - ( - BinaryRequest::TryAcceptTransaction { transaction }, - remainder, - ) - } - BinaryRequestTag::TrySpeculativeExec => { - let (transaction, remainder) = FromBytes::from_bytes(bytes)?; - let (state_root_hash, remainder) = FromBytes::from_bytes(remainder)?; - let (block_time, remainder) = FromBytes::from_bytes(remainder)?; - let (protocol_version, remainder) = FromBytes::from_bytes(remainder)?; - let (speculative_exec_at_block, remainder) = FromBytes::from_bytes(remainder)?; - ( - BinaryRequest::TrySpeculativeExec { - transaction, - state_root_hash, - block_time, - protocol_version, - speculative_exec_at_block, - }, - remainder, - ) - } - }; - if !remainder.is_empty() { - return Err(bytesrepr::Error::LeftOverBytes); - } - Ok(req) - } -} - -/// The type tag of a binary request. -#[derive(Debug, PartialEq)] -#[repr(u8)] -pub enum BinaryRequestTag { - /// Request to get data from the node - Get = 0, - /// Request to add a transaction into a blockchain. 
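One rule worth noting in the `TryFrom<(BinaryRequestTag, &[u8])>` impl above: the tag-specific body must consume every remaining byte, otherwise the request is rejected with `bytesrepr::Error::LeftOverBytes`. A hedged sketch of that check, using a made-up single-`u32` body rather than any real request type:

```rust
// Sketch of the strict framing rule: the body decoder must consume the whole
// slice, or the request is rejected (LeftOverBytes in the removed code).
fn decode_exact_u32(bytes: &[u8]) -> Result<u32, &'static str> {
    let (value_bytes, remainder) = bytes.split_at(bytes.len().min(4));
    let value_bytes: [u8; 4] = value_bytes.try_into().map_err(|_| "too few bytes")?;
    if !remainder.is_empty() {
        return Err("left-over bytes after request body");
    }
    Ok(u32::from_le_bytes(value_bytes))
}

fn main() {
    assert!(decode_exact_u32(&[1, 0, 0, 0]).is_ok());
    assert!(decode_exact_u32(&[1, 0, 0, 0, 99]).is_err()); // trailing byte rejected
}
```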
- TryAcceptTransaction = 1, - /// Request to execute a transaction speculatively. - TrySpeculativeExec = 2, -} - -impl BinaryRequestTag { - /// Creates a random `BinaryRequestTag`. - #[cfg(test)] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - 0 => BinaryRequestTag::Get, - 1 => BinaryRequestTag::TryAcceptTransaction, - 2 => BinaryRequestTag::TrySpeculativeExec, - _ => unreachable!(), - } - } -} - -impl TryFrom for BinaryRequestTag { - type Error = InvalidBinaryRequestTag; - - fn try_from(value: u8) -> Result { - match value { - 0 => Ok(BinaryRequestTag::Get), - 1 => Ok(BinaryRequestTag::TryAcceptTransaction), - 2 => Ok(BinaryRequestTag::TrySpeculativeExec), - _ => Err(InvalidBinaryRequestTag(value)), - } - } -} - -impl From for u8 { - fn from(value: BinaryRequestTag) -> Self { - value as u8 - } -} - -/// Error raised when trying to convert an invalid u8 into a `BinaryRequestTag`. -pub struct InvalidBinaryRequestTag(u8); - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn header_bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - for tag in [ - BinaryRequestTag::Get, - BinaryRequestTag::TryAcceptTransaction, - BinaryRequestTag::TrySpeculativeExec, - ] { - let version = ProtocolVersion::from_parts(rng.gen(), rng.gen(), rng.gen()); - let val = BinaryRequestHeader::new(version, tag); - bytesrepr::test_serialization_roundtrip(&val); - } - } - - #[test] - fn request_bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BinaryRequest::random(rng); - let bytes = val.to_bytes().expect("should serialize"); - assert_eq!(BinaryRequest::try_from((val.tag(), &bytes[..])), Ok(val)); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/binary_response.rs b/casper_types_ver_2_0/src/binary_port/binary_response.rs deleted file mode 100644 index f821bc3b..00000000 --- a/casper_types_ver_2_0/src/binary_port/binary_response.rs +++ /dev/null @@ -1,177 +0,0 @@ -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - ProtocolVersion, -}; -use alloc::vec::Vec; - -#[cfg(test)] -use crate::testing::TestRng; - -use super::{ - binary_response_header::BinaryResponseHeader, - payload_type::{PayloadEntity, PayloadType}, - record_id::RecordId, - DbRawBytesSpec, ErrorCode, -}; - -/// The response used in the binary port protocol. -#[derive(Debug, PartialEq)] -pub struct BinaryResponse { - /// Header of the binary response. - header: BinaryResponseHeader, - /// The response. - payload: Vec, -} - -impl BinaryResponse { - /// Creates new empty binary response. - pub fn new_empty(protocol_version: ProtocolVersion) -> Self { - Self { - header: BinaryResponseHeader::new(None, protocol_version), - payload: vec![], - } - } - - /// Creates new binary response with error code. - pub fn new_error(error: ErrorCode, protocol_version: ProtocolVersion) -> Self { - BinaryResponse { - header: BinaryResponseHeader::new_error(error, protocol_version), - payload: vec![], - } - } - - /// Creates new binary response from raw DB bytes. 
- pub fn from_db_raw_bytes( - record_id: RecordId, - spec: Option<DbRawBytesSpec>, - protocol_version: ProtocolVersion, - ) -> Self { - match spec { - Some(DbRawBytesSpec { - is_legacy, - raw_bytes, - }) => BinaryResponse { - header: BinaryResponseHeader::new( - Some(PayloadType::new_from_record_id(record_id, is_legacy)), - protocol_version, - ), - payload: raw_bytes, - }, - None => BinaryResponse { - header: BinaryResponseHeader::new_error(ErrorCode::NotFound, protocol_version), - payload: vec![], - }, - } - } - - /// Creates a new binary response from a value. - pub fn from_value<V>(val: V, protocol_version: ProtocolVersion) -> Self - where - V: ToBytes + PayloadEntity, - { - ToBytes::to_bytes(&val).map_or( - BinaryResponse::new_error(ErrorCode::InternalError, protocol_version), - |payload| BinaryResponse { - payload, - header: BinaryResponseHeader::new(Some(V::PAYLOAD_TYPE), protocol_version), - }, - ) - } - - /// Creates a new binary response from an optional value. - pub fn from_option<V>(opt: Option<V>, protocol_version: ProtocolVersion) -> Self - where - V: ToBytes + PayloadEntity, - { - match opt { - Some(val) => Self::from_value(val, protocol_version), - None => Self::new_empty(protocol_version), - } - } - - /// Returns true if response is success. - pub fn is_success(&self) -> bool { - self.header.is_success() - } - - /// Returns the error code. - pub fn error_code(&self) -> u8 { - self.header.error_code() - } - - /// Returns the payload type of the response. - pub fn returned_data_type_tag(&self) -> Option<u8> { - self.header.returned_data_type_tag() - } - - /// Returns true if the response means that data has not been found. - pub fn is_not_found(&self) -> bool { - self.header.is_not_found() - } - - /// Returns the payload. - pub fn payload(&self) -> &[u8] { - self.payload.as_ref() - } - - /// Returns the protocol version.
- pub fn protocol_version(&self) -> ProtocolVersion { - self.header.protocol_version() - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - header: BinaryResponseHeader::random(rng), - payload: rng.random_vec(64..128), - } - } -} - -impl ToBytes for BinaryResponse { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let BinaryResponse { header, payload } = self; - - header.write_bytes(writer)?; - payload.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.header.serialized_length() + self.payload.serialized_length() - } -} - -impl FromBytes for BinaryResponse { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (header, remainder) = FromBytes::from_bytes(bytes)?; - let (payload, remainder) = Bytes::from_bytes(remainder)?; - - Ok(( - BinaryResponse { - header, - payload: payload.into(), - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BinaryResponse::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/binary_response_and_request.rs b/casper_types_ver_2_0/src/binary_port/binary_response_and_request.rs deleted file mode 100644 index 78d4785d..00000000 --- a/casper_types_ver_2_0/src/binary_port/binary_response_and_request.rs +++ /dev/null @@ -1,155 +0,0 @@ -use crate::bytesrepr::{self, Bytes, FromBytes, ToBytes}; - -use super::binary_response::BinaryResponse; -#[cfg(any(feature = "testing", test))] -use super::payload_type::PayloadEntity; -use alloc::vec::Vec; - -#[cfg(any(feature = "testing", test))] -use super::record_id::RecordId; -#[cfg(any(feature = "testing", test))] -use crate::ProtocolVersion; - -#[cfg(test)] -use crate::testing::TestRng; - -/// The binary response along with the original binary request attached. -#[derive(Debug, PartialEq)] -pub struct BinaryResponseAndRequest { - /// The original request (as serialized bytes). - original_request: Vec, - /// The response. - response: BinaryResponse, -} - -impl BinaryResponseAndRequest { - /// Creates new binary response with the original request attached. - pub fn new(data: BinaryResponse, original_request: &[u8]) -> Self { - Self { - original_request: original_request.to_vec(), - response: data, - } - } - - /// Returns a new binary response with specified data and no original request. - #[cfg(any(feature = "testing", test))] - pub fn new_test_response( - record_id: RecordId, - data: &A, - protocol_version: ProtocolVersion, - ) -> BinaryResponseAndRequest { - use super::DbRawBytesSpec; - - let response = BinaryResponse::from_db_raw_bytes( - record_id, - Some(DbRawBytesSpec::new_current(&data.to_bytes().unwrap())), - protocol_version, - ); - Self::new(response, &[]) - } - - /// Returns a new binary response with specified legacy data and no original request. 
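`BinaryResponseAndRequest` above is encoded as the echoed original request (a length-prefixed byte string) followed by the response bytes. A small sketch of splitting such an envelope; the little-endian `u32` length prefix is an assumption about the `bytesrepr` convention, and `split_envelope` is a hypothetical helper rather than part of the removed API:

```rust
// Splits "length-prefixed original request" + "response bytes".
fn split_envelope(bytes: &[u8]) -> Option<(&[u8], &[u8])> {
    let len = u32::from_le_bytes(bytes.get(..4)?.try_into().ok()?) as usize;
    let rest = bytes.get(4..)?;
    Some((rest.get(..len)?, rest.get(len..)?))
}

fn main() {
    let mut wire = 3u32.to_le_bytes().to_vec();
    wire.extend_from_slice(&[0xAA, 0xBB, 0xCC]); // original request
    wire.extend_from_slice(&[0x01, 0x02]);       // response bytes
    let (request, response) = split_envelope(&wire).unwrap();
    assert_eq!(request, &[0xAA, 0xBB, 0xCC]);
    assert_eq!(response, &[0x01, 0x02]);
}
```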
- #[cfg(any(feature = "testing", test))] - pub fn new_legacy_test_response( - record_id: RecordId, - data: &A, - protocol_version: ProtocolVersion, - ) -> BinaryResponseAndRequest { - use super::DbRawBytesSpec; - - let response = BinaryResponse::from_db_raw_bytes( - record_id, - Some(DbRawBytesSpec::new_legacy( - &bincode::serialize(data).unwrap(), - )), - protocol_version, - ); - Self::new(response, &[]) - } - - /// Returns true if response is success. - pub fn is_success(&self) -> bool { - self.response.is_success() - } - - /// Returns the error code. - pub fn error_code(&self) -> u8 { - self.response.error_code() - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - original_request: rng.random_vec(64..128), - response: BinaryResponse::random(rng), - } - } - - /// Returns serialized bytes representing the original request. - pub fn original_request(&self) -> &[u8] { - self.original_request.as_ref() - } - - /// Returns the inner binary response. - pub fn response(&self) -> &BinaryResponse { - &self.response - } -} - -impl ToBytes for BinaryResponseAndRequest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let BinaryResponseAndRequest { - original_request, - response, - } = self; - - original_request.write_bytes(writer)?; - response.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.original_request.serialized_length() + self.response.serialized_length() - } -} - -impl FromBytes for BinaryResponseAndRequest { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (original_request, remainder) = Bytes::from_bytes(bytes)?; - let (response, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - BinaryResponseAndRequest { - original_request: original_request.into(), - response, - }, - remainder, - )) - } -} - -impl From for BinaryResponse { - fn from(response_and_request: BinaryResponseAndRequest) -> Self { - let BinaryResponseAndRequest { response, .. } = response_and_request; - response - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BinaryResponseAndRequest::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/binary_response_header.rs b/casper_types_ver_2_0/src/binary_port/binary_response_header.rs deleted file mode 100644 index 025a9068..00000000 --- a/casper_types_ver_2_0/src/binary_port/binary_response_header.rs +++ /dev/null @@ -1,134 +0,0 @@ -#[cfg(test)] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - ProtocolVersion, -}; -use alloc::vec::Vec; -#[cfg(test)] -use rand::Rng; - -use super::{ErrorCode, PayloadType}; - -/// Header of the binary response. -#[derive(Debug, PartialEq)] -pub struct BinaryResponseHeader { - protocol_version: ProtocolVersion, - error: u8, - returned_data_type_tag: Option, -} - -impl BinaryResponseHeader { - /// Creates new binary response header representing success. - pub fn new(returned_data_type: Option, protocol_version: ProtocolVersion) -> Self { - Self { - protocol_version, - error: ErrorCode::NoError as u8, - returned_data_type_tag: returned_data_type.map(|ty| ty as u8), - } - } - - /// Creates new binary response header representing error. 
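For orientation, the response header written by `BinaryResponseHeader` above is: protocol version, then an error byte, then an optional payload type tag. A sketch of reading that layout from raw bytes follows; the 12-byte version size and the 0/1 `Option` marker byte are assumptions based on common `bytesrepr` conventions, and both names below are illustrative:

```rust
// Hypothetical, simplified view of the response header fields.
struct ResponseHeaderView {
    error: u8,
    payload_type_tag: Option<u8>,
}

// Reads: [protocol version (assumed 12 bytes)] [error: u8] [Option<u8> tag].
fn decode_response_header(bytes: &[u8]) -> Option<(ResponseHeaderView, &[u8])> {
    let rest = bytes.get(12..)?; // skip the protocol version
    let error = *rest.first()?;
    let rest = rest.get(1..)?;
    let (payload_type_tag, rest) = match *rest.first()? {
        0 => (None, rest.get(1..)?),                // None
        1 => (Some(*rest.get(1)?), rest.get(2..)?), // Some(tag)
        _ => return None,
    };
    Some((ResponseHeaderView { error, payload_type_tag }, rest))
}
```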
- pub fn new_error(error: ErrorCode, protocol_version: ProtocolVersion) -> Self { - Self { - protocol_version, - error: error as u8, - returned_data_type_tag: None, - } - } - - /// Returns the type of the returned data. - pub fn returned_data_type_tag(&self) -> Option { - self.returned_data_type_tag - } - - /// Returns the error code. - pub fn error_code(&self) -> u8 { - self.error - } - - /// Returns true if the response represents success. - pub fn is_success(&self) -> bool { - self.error == ErrorCode::NoError as u8 - } - - /// Returns true if the response indicates the data was not found. - pub fn is_not_found(&self) -> bool { - self.error == ErrorCode::NotFound as u8 - } - - /// Returns the protocol version. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - let protocol_version = ProtocolVersion::from_parts(rng.gen(), rng.gen(), rng.gen()); - let error = rng.gen(); - let returned_data_type_tag = if rng.gen() { None } else { Some(rng.gen()) }; - - BinaryResponseHeader { - protocol_version, - error, - returned_data_type_tag, - } - } -} - -impl ToBytes for BinaryResponseHeader { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let Self { - protocol_version, - error, - returned_data_type_tag, - } = self; - - protocol_version.write_bytes(writer)?; - error.write_bytes(writer)?; - returned_data_type_tag.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.protocol_version.serialized_length() - + self.error.serialized_length() - + self.returned_data_type_tag.serialized_length() - } -} - -impl FromBytes for BinaryResponseHeader { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (protocol_version, remainder) = FromBytes::from_bytes(bytes)?; - let (error, remainder) = FromBytes::from_bytes(remainder)?; - let (returned_data_type_tag, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - BinaryResponseHeader { - protocol_version, - error, - returned_data_type_tag, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BinaryResponseHeader::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/error_code.rs b/casper_types_ver_2_0/src/binary_port/error_code.rs deleted file mode 100644 index 76920537..00000000 --- a/casper_types_ver_2_0/src/binary_port/error_code.rs +++ /dev/null @@ -1,79 +0,0 @@ -use core::{convert::TryFrom, fmt}; - -/// The error code indicating the result of handling the binary request. -#[derive(Debug, Clone)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -#[repr(u8)] -pub enum ErrorCode { - /// Request executed correctly. - #[cfg_attr(feature = "std", error("request executed correctly"))] - NoError = 0, - /// This function is disabled. - #[cfg_attr(feature = "std", error("this function is disabled"))] - FunctionDisabled = 1, - /// Data not found. - #[cfg_attr(feature = "std", error("data not found"))] - NotFound = 2, - /// Root not found. - #[cfg_attr(feature = "std", error("root not found"))] - RootNotFound = 3, - /// Invalid deploy item variant. 
- #[cfg_attr(feature = "std", error("invalid deploy item variant"))] - InvalidDeployItemVariant = 4, - /// Wasm preprocessing. - #[cfg_attr(feature = "std", error("wasm preprocessing"))] - WasmPreprocessing = 5, - /// Invalid protocol version. - #[cfg_attr(feature = "std", error("unsupported protocol version"))] - UnsupportedProtocolVersion = 6, - /// Invalid transaction. - #[cfg_attr(feature = "std", error("invalid transaction"))] - InvalidTransaction = 7, - /// Internal error. - #[cfg_attr(feature = "std", error("internal error"))] - InternalError = 8, - /// The query to global state failed. - #[cfg_attr(feature = "std", error("the query to global state failed"))] - QueryFailedToExecute = 9, - /// Bad request. - #[cfg_attr(feature = "std", error("bad request"))] - BadRequest = 10, - /// Received an unsupported type of request. - #[cfg_attr(feature = "std", error("unsupported request"))] - UnsupportedRequest = 11, -} - -impl TryFrom for ErrorCode { - type Error = UnknownErrorCode; - - fn try_from(value: u8) -> Result { - match value { - 0 => Ok(ErrorCode::NoError), - 1 => Ok(ErrorCode::FunctionDisabled), - 2 => Ok(ErrorCode::NotFound), - 3 => Ok(ErrorCode::RootNotFound), - 4 => Ok(ErrorCode::InvalidDeployItemVariant), - 5 => Ok(ErrorCode::WasmPreprocessing), - 6 => Ok(ErrorCode::UnsupportedProtocolVersion), - 7 => Ok(ErrorCode::InvalidTransaction), - 8 => Ok(ErrorCode::InternalError), - 9 => Ok(ErrorCode::QueryFailedToExecute), - 10 => Ok(ErrorCode::BadRequest), - 11 => Ok(ErrorCode::UnsupportedRequest), - _ => Err(UnknownErrorCode), - } - } -} - -/// Error indicating that the error code is unknown. -#[derive(Debug, Clone, Copy)] -pub struct UnknownErrorCode; - -impl fmt::Display for UnknownErrorCode { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "unknown node error code") - } -} - -#[cfg(feature = "std")] -impl std::error::Error for UnknownErrorCode {} diff --git a/casper_types_ver_2_0/src/binary_port/get_all_values_result.rs b/casper_types_ver_2_0/src/binary_port/get_all_values_result.rs deleted file mode 100644 index 3ddada4a..00000000 --- a/casper_types_ver_2_0/src/binary_port/get_all_values_result.rs +++ /dev/null @@ -1,15 +0,0 @@ -use alloc::vec::Vec; - -use crate::StoredValue; - -/// Represents a result of a `get_all_values` request. -#[derive(Debug, PartialEq)] -pub enum GetAllValuesResult { - /// Invalid state root hash. - RootNotFound, - /// Contains values returned from the global state. - Success { - /// Current values. - values: Vec, - }, -} diff --git a/casper_types_ver_2_0/src/binary_port/get_request.rs b/casper_types_ver_2_0/src/binary_port/get_request.rs deleted file mode 100644 index 01fb8f23..00000000 --- a/casper_types_ver_2_0/src/binary_port/get_request.rs +++ /dev/null @@ -1,146 +0,0 @@ -use crate::bytesrepr::{self, Bytes, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -use alloc::vec::Vec; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -use super::state_request::GlobalStateRequest; - -const RECORD_TAG: u8 = 0; -const INFORMATION_TAG: u8 = 1; -const STATE_TAG: u8 = 2; - -/// A request to get data from the node. -#[derive(Clone, Debug, PartialEq)] -pub enum GetRequest { - /// Retrieves a record from the node. - Record { - /// Type tag of the record to retrieve. - record_type_tag: u16, - /// Key encoded into bytes. - key: Vec, - }, - /// Retrieves information from the node. - Information { - /// Type tag of the information to retrieve. - info_type_tag: u16, - /// Key encoded into bytes. 
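A client that only needs diagnostics can map the raw error byte straight onto the messages listed in the `ErrorCode` enum above. A minimal sketch (the `describe_error` helper is hypothetical; the strings are the ones carried by the removed enum):

```rust
// Maps a binary-port error byte to a human-readable message; anything
// outside 0..=11 is treated as an unknown code.
fn describe_error(code: u8) -> &'static str {
    match code {
        0 => "request executed correctly",
        1 => "this function is disabled",
        2 => "data not found",
        3 => "root not found",
        4 => "invalid deploy item variant",
        5 => "wasm preprocessing",
        6 => "unsupported protocol version",
        7 => "invalid transaction",
        8 => "internal error",
        9 => "the query to global state failed",
        10 => "bad request",
        11 => "unsupported request",
        _ => "unknown node error code",
    }
}
```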
- key: Vec, - }, - /// Retrieves data from the global state. - State(GlobalStateRequest), -} - -impl GetRequest { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - 0 => GetRequest::Record { - record_type_tag: rng.gen(), - key: rng.random_vec(16..32), - }, - 1 => GetRequest::Information { - info_type_tag: rng.gen(), - key: rng.random_vec(16..32), - }, - 2 => GetRequest::State(GlobalStateRequest::random(rng)), - _ => unreachable!(), - } - } -} - -impl ToBytes for GetRequest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - GetRequest::Record { - record_type_tag, - key, - } => { - RECORD_TAG.write_bytes(writer)?; - record_type_tag.write_bytes(writer)?; - key.write_bytes(writer) - } - GetRequest::Information { info_type_tag, key } => { - INFORMATION_TAG.write_bytes(writer)?; - info_type_tag.write_bytes(writer)?; - key.write_bytes(writer) - } - GetRequest::State(req) => { - STATE_TAG.write_bytes(writer)?; - req.write_bytes(writer) - } - } - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - GetRequest::Record { - record_type_tag, - key, - } => record_type_tag.serialized_length() + key.serialized_length(), - GetRequest::Information { info_type_tag, key } => { - info_type_tag.serialized_length() + key.serialized_length() - } - GetRequest::State(req) => req.serialized_length(), - } - } -} - -impl FromBytes for GetRequest { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = FromBytes::from_bytes(bytes)?; - match tag { - RECORD_TAG => { - let (record_type_tag, remainder) = FromBytes::from_bytes(remainder)?; - let (key, remainder) = Bytes::from_bytes(remainder)?; - Ok(( - GetRequest::Record { - record_type_tag, - key: key.into(), - }, - remainder, - )) - } - INFORMATION_TAG => { - let (info_type_tag, remainder) = FromBytes::from_bytes(remainder)?; - let (key, remainder) = Bytes::from_bytes(remainder)?; - Ok(( - GetRequest::Information { - info_type_tag, - key: key.into(), - }, - remainder, - )) - } - STATE_TAG => { - let (req, remainder) = FromBytes::from_bytes(remainder)?; - Ok((GetRequest::State(req), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = GetRequest::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/global_state_query_result.rs b/casper_types_ver_2_0/src/binary_port/global_state_query_result.rs deleted file mode 100644 index 07619201..00000000 --- a/casper_types_ver_2_0/src/binary_port/global_state_query_result.rs +++ /dev/null @@ -1,99 +0,0 @@ -//! The result of the query for the global state value. - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - StoredValue, -}; -use alloc::{string::String, vec::Vec}; - -#[cfg(test)] -use crate::testing::TestRng; - -#[cfg(test)] -use crate::{ByteCode, ByteCodeKind}; - -/// Carries the successful result of the global state query. -#[derive(Debug, PartialEq, Clone)] -pub struct GlobalStateQueryResult { - /// Stored value. - value: StoredValue, - /// Proof. - merkle_proof: String, -} - -impl GlobalStateQueryResult { - /// Creates the global state query result. 
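Putting the `GetRequest` framing above together: a one-byte variant tag (0 = record, 1 = information, 2 = state), then the `u16` type tag, then the key as a length-prefixed byte string. The sketch below builds an information request by hand; the little-endian integer encoding and the `u32` length prefix are assumptions about `bytesrepr`, and the helper name is made up:

```rust
// Wire form of GetRequest::Information { info_type_tag, key } (illustrative).
fn encode_information_get(info_type_tag: u16, key: &[u8]) -> Vec<u8> {
    let mut out = Vec::new();
    out.push(1u8); // INFORMATION_TAG
    out.extend_from_slice(&info_type_tag.to_le_bytes());
    out.extend_from_slice(&(key.len() as u32).to_le_bytes()); // key length prefix
    out.extend_from_slice(key);
    out
}
```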
- pub fn new(value: StoredValue, merkle_proof: String) -> Self { - Self { - value, - merkle_proof, - } - } - - /// Returns the stored value and the merkle proof. - pub fn into_inner(self) -> (StoredValue, String) { - (self.value, self.merkle_proof) - } - - #[cfg(test)] - pub(crate) fn random_invalid(rng: &mut TestRng) -> Self { - // Note: This does NOT create a logically-valid struct. Instance created by this function - // should be used in `bytesrepr` tests only. - Self { - value: StoredValue::ByteCode(ByteCode::new( - ByteCodeKind::V1CasperWasm, - rng.random_vec(10..20), - )), - merkle_proof: rng.random_string(10..20), - } - } -} - -impl ToBytes for GlobalStateQueryResult { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let GlobalStateQueryResult { - value, - merkle_proof, - } = self; - value.write_bytes(writer)?; - merkle_proof.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.value.serialized_length() + self.merkle_proof.serialized_length() - } -} - -impl FromBytes for GlobalStateQueryResult { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, remainder) = FromBytes::from_bytes(bytes)?; - let (merkle_proof, remainder) = FromBytes::from_bytes(remainder)?; - Ok(( - GlobalStateQueryResult { - value, - merkle_proof, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = GlobalStateQueryResult::random_invalid(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/information_request.rs b/casper_types_ver_2_0/src/binary_port/information_request.rs deleted file mode 100644 index 79756aba..00000000 --- a/casper_types_ver_2_0/src/binary_port/information_request.rs +++ /dev/null @@ -1,370 +0,0 @@ -use alloc::vec::Vec; -use core::convert::TryFrom; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - BlockIdentifier, TransactionHash, -}; - -use super::GetRequest; - -/// Request for information from the node. -#[derive(Clone, Debug, PartialEq)] -pub enum InformationRequest { - /// Returns the block header by an identifier, no identifier indicates the latest block. - BlockHeader(Option), - /// Returns the signed block by an identifier, no identifier indicates the latest block. - SignedBlock(Option), - /// Returns a transaction with approvals and execution info for a given hash. - Transaction(TransactionHash), - /// Returns connected peers. - Peers, - /// Returns node uptime. - Uptime, - /// Returns last progress of the sync process. - LastProgress, - /// Returns current state of the main reactor. - ReactorState, - /// Returns network name. - NetworkName, - /// Returns consensus validator changes. - ConsensusValidatorChanges, - /// Returns status of the BlockSynchronizer. - BlockSynchronizerStatus, - /// Returns the available block range. - AvailableBlockRange, - /// Returns info about next upgrade. - NextUpgrade, - /// Returns consensus status. - ConsensusStatus, - /// Returns chainspec raw bytes. - ChainspecRawBytes, - /// Returns the status information of the node. - NodeStatus, -} - -impl InformationRequest { - /// Returns the tag of the request. 
- pub fn tag(&self) -> InformationRequestTag { - match self { - InformationRequest::BlockHeader(_) => InformationRequestTag::BlockHeader, - InformationRequest::SignedBlock(_) => InformationRequestTag::SignedBlock, - InformationRequest::Transaction(_) => InformationRequestTag::Transaction, - InformationRequest::Peers => InformationRequestTag::Peers, - InformationRequest::Uptime => InformationRequestTag::Uptime, - InformationRequest::LastProgress => InformationRequestTag::LastProgress, - InformationRequest::ReactorState => InformationRequestTag::ReactorState, - InformationRequest::NetworkName => InformationRequestTag::NetworkName, - InformationRequest::ConsensusValidatorChanges => { - InformationRequestTag::ConsensusValidatorChanges - } - InformationRequest::BlockSynchronizerStatus => { - InformationRequestTag::BlockSynchronizerStatus - } - InformationRequest::AvailableBlockRange => InformationRequestTag::AvailableBlockRange, - InformationRequest::NextUpgrade => InformationRequestTag::NextUpgrade, - InformationRequest::ConsensusStatus => InformationRequestTag::ConsensusStatus, - InformationRequest::ChainspecRawBytes => InformationRequestTag::ChainspecRawBytes, - InformationRequest::NodeStatus => InformationRequestTag::NodeStatus, - } - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match InformationRequestTag::random(rng) { - InformationRequestTag::BlockHeader => { - if rng.gen() { - InformationRequest::BlockHeader(None) - } else { - InformationRequest::BlockHeader(Some(BlockIdentifier::random(rng))) - } - } - InformationRequestTag::SignedBlock => { - if rng.gen() { - InformationRequest::SignedBlock(None) - } else { - InformationRequest::SignedBlock(Some(BlockIdentifier::random(rng))) - } - } - InformationRequestTag::Transaction => { - InformationRequest::Transaction(TransactionHash::random(rng)) - } - InformationRequestTag::Peers => InformationRequest::Peers, - InformationRequestTag::Uptime => InformationRequest::Uptime, - InformationRequestTag::LastProgress => InformationRequest::LastProgress, - InformationRequestTag::ReactorState => InformationRequest::ReactorState, - InformationRequestTag::NetworkName => InformationRequest::NetworkName, - InformationRequestTag::ConsensusValidatorChanges => { - InformationRequest::ConsensusValidatorChanges - } - InformationRequestTag::BlockSynchronizerStatus => { - InformationRequest::BlockSynchronizerStatus - } - InformationRequestTag::AvailableBlockRange => InformationRequest::AvailableBlockRange, - InformationRequestTag::NextUpgrade => InformationRequest::NextUpgrade, - InformationRequestTag::ConsensusStatus => InformationRequest::ConsensusStatus, - InformationRequestTag::ChainspecRawBytes => InformationRequest::ChainspecRawBytes, - InformationRequestTag::NodeStatus => InformationRequest::NodeStatus, - } - } -} - -impl ToBytes for InformationRequest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - InformationRequest::BlockHeader(block_identifier) => { - block_identifier.write_bytes(writer) - } - InformationRequest::SignedBlock(block_identifier) => { - block_identifier.write_bytes(writer) - } - InformationRequest::Transaction(transaction_hash) => { - transaction_hash.write_bytes(writer) - } - InformationRequest::Peers - | InformationRequest::Uptime - | InformationRequest::LastProgress - | InformationRequest::ReactorState - | 
InformationRequest::NetworkName - | InformationRequest::ConsensusValidatorChanges - | InformationRequest::BlockSynchronizerStatus - | InformationRequest::AvailableBlockRange - | InformationRequest::NextUpgrade - | InformationRequest::ConsensusStatus - | InformationRequest::ChainspecRawBytes - | InformationRequest::NodeStatus => Ok(()), - } - } - - fn serialized_length(&self) -> usize { - match self { - InformationRequest::BlockHeader(block_identifier) => { - block_identifier.serialized_length() - } - InformationRequest::SignedBlock(block_identifier) => { - block_identifier.serialized_length() - } - InformationRequest::Transaction(transaction_hash) => { - transaction_hash.serialized_length() - } - InformationRequest::Peers - | InformationRequest::Uptime - | InformationRequest::LastProgress - | InformationRequest::ReactorState - | InformationRequest::NetworkName - | InformationRequest::ConsensusValidatorChanges - | InformationRequest::BlockSynchronizerStatus - | InformationRequest::AvailableBlockRange - | InformationRequest::NextUpgrade - | InformationRequest::ConsensusStatus - | InformationRequest::ChainspecRawBytes - | InformationRequest::NodeStatus => 0, - } - } -} - -impl TryFrom<(InformationRequestTag, &[u8])> for InformationRequest { - type Error = bytesrepr::Error; - - fn try_from((tag, key_bytes): (InformationRequestTag, &[u8])) -> Result { - let (req, remainder) = match tag { - InformationRequestTag::BlockHeader => { - let (block_identifier, remainder) = FromBytes::from_bytes(key_bytes)?; - (InformationRequest::BlockHeader(block_identifier), remainder) - } - InformationRequestTag::SignedBlock => { - let (block_identifier, remainder) = FromBytes::from_bytes(key_bytes)?; - (InformationRequest::SignedBlock(block_identifier), remainder) - } - InformationRequestTag::Transaction => { - let (transaction_hash, remainder) = FromBytes::from_bytes(key_bytes)?; - (InformationRequest::Transaction(transaction_hash), remainder) - } - InformationRequestTag::Peers => (InformationRequest::Peers, key_bytes), - InformationRequestTag::Uptime => (InformationRequest::Uptime, key_bytes), - InformationRequestTag::LastProgress => (InformationRequest::LastProgress, key_bytes), - InformationRequestTag::ReactorState => (InformationRequest::ReactorState, key_bytes), - InformationRequestTag::NetworkName => (InformationRequest::NetworkName, key_bytes), - InformationRequestTag::ConsensusValidatorChanges => { - (InformationRequest::ConsensusValidatorChanges, key_bytes) - } - InformationRequestTag::BlockSynchronizerStatus => { - (InformationRequest::BlockSynchronizerStatus, key_bytes) - } - InformationRequestTag::AvailableBlockRange => { - (InformationRequest::AvailableBlockRange, key_bytes) - } - InformationRequestTag::NextUpgrade => (InformationRequest::NextUpgrade, key_bytes), - InformationRequestTag::ConsensusStatus => { - (InformationRequest::ConsensusStatus, key_bytes) - } - InformationRequestTag::ChainspecRawBytes => { - (InformationRequest::ChainspecRawBytes, key_bytes) - } - InformationRequestTag::NodeStatus => (InformationRequest::NodeStatus, key_bytes), - }; - if !remainder.is_empty() { - return Err(bytesrepr::Error::LeftOverBytes); - } - Ok(req) - } -} - -impl TryFrom for GetRequest { - type Error = bytesrepr::Error; - - fn try_from(request: InformationRequest) -> Result { - Ok(GetRequest::Information { - info_type_tag: request.tag().into(), - key: request.to_bytes()?, - }) - } -} - -/// Identifier of an information request. 
-#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] -#[repr(u16)] -pub enum InformationRequestTag { - /// Block header request. - BlockHeader = 0, - /// Signed block request. - SignedBlock = 1, - /// Transaction request. - Transaction = 2, - /// Peers request. - Peers = 3, - /// Uptime request. - Uptime = 4, - /// Last progress request. - LastProgress = 5, - /// Reactor state request. - ReactorState = 6, - /// Network name request. - NetworkName = 7, - /// Consensus validator changes request. - ConsensusValidatorChanges = 8, - /// Block synchronizer status request. - BlockSynchronizerStatus = 9, - /// Available block range request. - AvailableBlockRange = 10, - /// Next upgrade request. - NextUpgrade = 11, - /// Consensus status request. - ConsensusStatus = 12, - /// Chainspec raw bytes request. - ChainspecRawBytes = 13, - /// Node status request. - NodeStatus = 14, -} - -impl InformationRequestTag { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..15) { - 0 => InformationRequestTag::BlockHeader, - 1 => InformationRequestTag::SignedBlock, - 2 => InformationRequestTag::Transaction, - 3 => InformationRequestTag::Peers, - 4 => InformationRequestTag::Uptime, - 5 => InformationRequestTag::LastProgress, - 6 => InformationRequestTag::ReactorState, - 7 => InformationRequestTag::NetworkName, - 8 => InformationRequestTag::ConsensusValidatorChanges, - 9 => InformationRequestTag::BlockSynchronizerStatus, - 10 => InformationRequestTag::AvailableBlockRange, - 11 => InformationRequestTag::NextUpgrade, - 12 => InformationRequestTag::ConsensusStatus, - 13 => InformationRequestTag::ChainspecRawBytes, - 14 => InformationRequestTag::NodeStatus, - _ => unreachable!(), - } - } -} - -impl TryFrom<u16> for InformationRequestTag { - type Error = UnknownInformationRequestTag; - - fn try_from(value: u16) -> Result<Self, Self::Error> { - match value { - 0 => Ok(InformationRequestTag::BlockHeader), - 1 => Ok(InformationRequestTag::SignedBlock), - 2 => Ok(InformationRequestTag::Transaction), - 3 => Ok(InformationRequestTag::Peers), - 4 => Ok(InformationRequestTag::Uptime), - 5 => Ok(InformationRequestTag::LastProgress), - 6 => Ok(InformationRequestTag::ReactorState), - 7 => Ok(InformationRequestTag::NetworkName), - 8 => Ok(InformationRequestTag::ConsensusValidatorChanges), - 9 => Ok(InformationRequestTag::BlockSynchronizerStatus), - 10 => Ok(InformationRequestTag::AvailableBlockRange), - 11 => Ok(InformationRequestTag::NextUpgrade), - 12 => Ok(InformationRequestTag::ConsensusStatus), - 13 => Ok(InformationRequestTag::ChainspecRawBytes), - 14 => Ok(InformationRequestTag::NodeStatus), - _ => Err(UnknownInformationRequestTag(value)), - } - } -} - -impl From<InformationRequestTag> for u16 { - fn from(value: InformationRequestTag) -> Self { - value as u16 - } -} - -/// Error returned when trying to convert a `u16` into an `InformationRequestTag`.
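Combining the tag values above with the `GetRequest` framing shown earlier: unit-like information requests (`Peers`, `Uptime`, `NodeStatus`, and so on) serialize to an empty key, so the request body reduces to the variant tag, the `u16` information tag, and a zero-length key. A self-contained sketch, with the same encoding assumptions as before and a hypothetical helper name:

```rust
// NodeStatus (tag 14 above) carries no key, so its Get request body is just
// the information variant tag, the u16 tag and an empty, length-prefixed key.
fn node_status_request_body() -> Vec<u8> {
    let mut out = vec![1u8];                     // INFORMATION_TAG
    out.extend_from_slice(&14u16.to_le_bytes()); // InformationRequestTag::NodeStatus
    out.extend_from_slice(&0u32.to_le_bytes());  // empty key (assumed u32 length prefix)
    out
}
```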
-#[derive(Debug, PartialEq, Eq)] -pub struct UnknownInformationRequestTag(u16); - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn tag_roundtrip() { - for tag in [ - InformationRequestTag::BlockHeader, - InformationRequestTag::SignedBlock, - InformationRequestTag::Transaction, - InformationRequestTag::Peers, - InformationRequestTag::Uptime, - InformationRequestTag::LastProgress, - InformationRequestTag::ReactorState, - InformationRequestTag::NetworkName, - InformationRequestTag::ConsensusValidatorChanges, - InformationRequestTag::BlockSynchronizerStatus, - InformationRequestTag::AvailableBlockRange, - InformationRequestTag::NextUpgrade, - InformationRequestTag::ConsensusStatus, - InformationRequestTag::ChainspecRawBytes, - InformationRequestTag::NodeStatus, - ] { - let value = u16::from(tag); - assert_eq!(InformationRequestTag::try_from(value), Ok(tag)); - } - } - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = InformationRequest::random(rng); - let bytes = val.to_bytes().expect("should serialize"); - assert_eq!( - InformationRequest::try_from((val.tag(), &bytes[..])), - Ok(val) - ); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/minimal_block_info.rs b/casper_types_ver_2_0/src/binary_port/minimal_block_info.rs deleted file mode 100644 index 7e470895..00000000 --- a/casper_types_ver_2_0/src/binary_port/minimal_block_info.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Block, BlockHash, Digest, EraId, PublicKey, Timestamp, -}; -use alloc::vec::Vec; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -/// Minimal info about a `Block` needed to satisfy the node status request. 
-#[derive(Debug, PartialEq, Eq)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(any(feature = "std", test), serde(deny_unknown_fields))] -pub struct MinimalBlockInfo { - hash: BlockHash, - timestamp: Timestamp, - era_id: EraId, - height: u64, - state_root_hash: Digest, - creator: PublicKey, -} - -impl MinimalBlockInfo { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - hash: BlockHash::random(rng), - timestamp: Timestamp::random(rng), - era_id: EraId::random(rng), - height: rng.gen(), - state_root_hash: Digest::random(rng), - creator: PublicKey::random(rng), - } - } -} - -impl FromBytes for MinimalBlockInfo { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (hash, remainder) = BlockHash::from_bytes(bytes)?; - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - let (era_id, remainder) = EraId::from_bytes(remainder)?; - let (height, remainder) = u64::from_bytes(remainder)?; - let (state_root_hash, remainder) = Digest::from_bytes(remainder)?; - let (creator, remainder) = PublicKey::from_bytes(remainder)?; - Ok(( - MinimalBlockInfo { - hash, - timestamp, - era_id, - height, - state_root_hash, - creator, - }, - remainder, - )) - } -} - -impl ToBytes for MinimalBlockInfo { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.hash.write_bytes(writer)?; - self.timestamp.write_bytes(writer)?; - self.era_id.write_bytes(writer)?; - self.height.write_bytes(writer)?; - self.state_root_hash.write_bytes(writer)?; - self.creator.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.hash.serialized_length() - + self.timestamp.serialized_length() - + self.era_id.serialized_length() - + self.height.serialized_length() - + self.state_root_hash.serialized_length() - + self.creator.serialized_length() - } -} - -impl From for MinimalBlockInfo { - fn from(block: Block) -> Self { - let proposer = match &block { - Block::V1(v1) => v1.proposer().clone(), - Block::V2(v2) => v2.proposer().clone(), - }; - - MinimalBlockInfo { - hash: *block.hash(), - timestamp: block.timestamp(), - era_id: block.era_id(), - height: block.height(), - state_root_hash: *block.state_root_hash(), - creator: proposer, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = MinimalBlockInfo::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/node_status.rs b/casper_types_ver_2_0/src/binary_port/node_status.rs deleted file mode 100644 index fb255f8e..00000000 --- a/casper_types_ver_2_0/src/binary_port/node_status.rs +++ /dev/null @@ -1,173 +0,0 @@ -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - AvailableBlockRange, BlockSynchronizerStatus, Digest, NextUpgrade, Peers, PublicKey, - ReactorState, TimeDiff, Timestamp, -}; -use alloc::{string::String, vec::Vec}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -use super::MinimalBlockInfo; - -/// Status information about the node. -#[derive(Debug, PartialEq)] -pub struct NodeStatus { - /// The node ID and network address of each connected peer. - pub peers: Peers, - /// The compiled node version. 
- pub build_version: String, - /// The chainspec name. - pub chainspec_name: String, - /// The state root hash of the lowest block in the available block range. - pub starting_state_root_hash: Digest, - /// The minimal info of the last block from the linear chain. - pub last_added_block_info: Option, - /// Our public signing key. - pub our_public_signing_key: Option, - /// The next round length if this node is a validator. - pub round_length: Option, - /// Information about the next scheduled upgrade. - pub next_upgrade: Option, - /// Time that passed since the node has started. - pub uptime: TimeDiff, - /// The current state of node reactor. - pub reactor_state: ReactorState, - /// Timestamp of the last recorded progress in the reactor. - pub last_progress: Timestamp, - /// The available block range in storage. - pub available_block_range: AvailableBlockRange, - /// The status of the block synchronizer builders. - pub block_sync: BlockSynchronizerStatus, -} - -impl NodeStatus { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - peers: Peers::random(rng), - build_version: rng.random_string(5..10), - chainspec_name: rng.random_string(5..10), - starting_state_root_hash: Digest::random(rng), - last_added_block_info: rng.gen::().then_some(MinimalBlockInfo::random(rng)), - our_public_signing_key: rng.gen::().then_some(PublicKey::random(rng)), - round_length: rng - .gen::() - .then_some(TimeDiff::from_millis(rng.gen())), - next_upgrade: rng.gen::().then_some(NextUpgrade::random(rng)), - uptime: TimeDiff::from_millis(rng.gen()), - reactor_state: ReactorState::random(rng), - last_progress: Timestamp::random(rng), - available_block_range: AvailableBlockRange::random(rng), - block_sync: BlockSynchronizerStatus::random(rng), - } - } -} - -impl FromBytes for NodeStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (peers, remainder) = FromBytes::from_bytes(bytes)?; - let (build_version, remainder) = String::from_bytes(remainder)?; - let (chainspec_name, remainder) = String::from_bytes(remainder)?; - let (starting_state_root_hash, remainder) = Digest::from_bytes(remainder)?; - let (last_added_block_info, remainder) = Option::::from_bytes(remainder)?; - let (our_public_signing_key, remainder) = Option::::from_bytes(remainder)?; - let (round_length, remainder) = Option::::from_bytes(remainder)?; - let (next_upgrade, remainder) = Option::::from_bytes(remainder)?; - let (uptime, remainder) = TimeDiff::from_bytes(remainder)?; - let (reactor_state, remainder) = ReactorState::from_bytes(remainder)?; - let (last_progress, remainder) = Timestamp::from_bytes(remainder)?; - let (available_block_range, remainder) = AvailableBlockRange::from_bytes(remainder)?; - let (block_sync, remainder) = BlockSynchronizerStatus::from_bytes(remainder)?; - Ok(( - NodeStatus { - peers, - build_version, - chainspec_name, - starting_state_root_hash, - last_added_block_info, - our_public_signing_key, - round_length, - next_upgrade, - uptime, - reactor_state, - last_progress, - available_block_range, - block_sync, - }, - remainder, - )) - } -} - -impl ToBytes for NodeStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let NodeStatus { - peers, - build_version, - chainspec_name, - starting_state_root_hash, - last_added_block_info, - our_public_signing_key, - round_length, - 
next_upgrade, - uptime, - reactor_state, - last_progress, - available_block_range, - block_sync, - } = self; - peers.write_bytes(writer)?; - build_version.write_bytes(writer)?; - chainspec_name.write_bytes(writer)?; - starting_state_root_hash.write_bytes(writer)?; - last_added_block_info.write_bytes(writer)?; - our_public_signing_key.write_bytes(writer)?; - round_length.write_bytes(writer)?; - next_upgrade.write_bytes(writer)?; - uptime.write_bytes(writer)?; - reactor_state.write_bytes(writer)?; - last_progress.write_bytes(writer)?; - available_block_range.write_bytes(writer)?; - block_sync.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.peers.serialized_length() - + self.build_version.serialized_length() - + self.chainspec_name.serialized_length() - + self.starting_state_root_hash.serialized_length() - + self.last_added_block_info.serialized_length() - + self.our_public_signing_key.serialized_length() - + self.round_length.serialized_length() - + self.next_upgrade.serialized_length() - + self.uptime.serialized_length() - + self.reactor_state.serialized_length() - + self.last_progress.serialized_length() - + self.available_block_range.serialized_length() - + self.block_sync.serialized_length() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = NodeStatus::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/payload_type.rs b/casper_types_ver_2_0/src/binary_port/payload_type.rs deleted file mode 100644 index 059c8419..00000000 --- a/casper_types_ver_2_0/src/binary_port/payload_type.rs +++ /dev/null @@ -1,510 +0,0 @@ -//! The payload type. - -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -#[cfg(test)] -use rand::Rng; - -use alloc::vec::Vec; -use core::{convert::TryFrom, fmt}; - -#[cfg(test)] -use crate::testing::TestRng; - -#[cfg(any(feature = "std", test))] -use super::NodeStatus; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - execution::{ExecutionResult, ExecutionResultV1}, - AvailableBlockRange, BlockBody, BlockBodyV1, BlockHeader, BlockHeaderV1, BlockSignatures, - BlockSynchronizerStatus, Deploy, FinalizedApprovals, FinalizedDeployApprovals, Peers, - ReactorState, SignedBlock, StoredValue, Transaction, Transfer, -}; -#[cfg(any(feature = "std", test))] -use crate::{ChainspecRawBytes, NextUpgrade}; - -use super::{ - global_state_query_result::GlobalStateQueryResult, - record_id::RecordId, - type_wrappers::{ - ConsensusStatus, ConsensusValidatorChanges, GetTrieFullResult, LastProgress, NetworkName, - SpeculativeExecutionResult, - }, - TransactionWithExecutionInfo, Uptime, -}; - -/// A type of the payload being returned in a binary response. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum PayloadType { - /// Legacy version of the block header. - BlockHeaderV1, - /// Block header. - BlockHeader, - /// Legacy version of the block body. - BlockBodyV1, - /// Block body. - BlockBody, - /// Legacy version of the approvals hashes. - ApprovalsHashesV1, - /// Approvals hashes - ApprovalsHashes, - /// Block signatures. - BlockSignatures, - /// Deploy. - Deploy, - /// Transaction. - Transaction, - /// Legacy version of the execution result. - ExecutionResultV1, - /// Execution result. - ExecutionResult, - /// Transfers. - Transfers, - /// Finalized deploy approvals. 
- FinalizedDeployApprovals, - /// Finalized approvals. - FinalizedApprovals, - /// Block with signatures. - SignedBlock, - /// Transaction with approvals and execution info. - TransactionWithExecutionInfo, - /// Peers. - Peers, - /// Last progress. - LastProgress, - /// State of the reactor. - ReactorState, - /// Network name. - NetworkName, - /// Consensus validator changes. - ConsensusValidatorChanges, // return type in `effects.rs` will be turned into dedicated type. - /// Status of the block synchronizer. - BlockSynchronizerStatus, - /// Available block range. - AvailableBlockRange, - /// Information about the next network upgrade. - NextUpgrade, - /// Consensus status. - ConsensusStatus, // return type in `effects.rs` will be turned into dedicated type. - /// Chainspec represented as raw bytes. - ChainspecRawBytes, - /// Uptime. - Uptime, - /// Result of checking if given block is in the highest available block range. - HighestBlockSequenceCheckResult, - /// Result of the speculative execution, - SpeculativeExecutionResult, - /// Result of querying global state, - GlobalStateQueryResult, - /// Result of querying global state for all values under a specified key. - StoredValues, - /// Result of querying global state for a full trie. - GetTrieFullResult, - /// Node status. - NodeStatus, -} - -impl PayloadType { - pub(crate) fn new_from_record_id(record_id: RecordId, is_legacy: bool) -> Self { - match (is_legacy, record_id) { - (true, RecordId::BlockHeader) => Self::BlockHeaderV1, - (true, RecordId::BlockBody) => Self::BlockBodyV1, - (true, RecordId::ApprovalsHashes) => Self::ApprovalsHashesV1, - (true, RecordId::BlockMetadata) => Self::BlockSignatures, - (true, RecordId::Transaction) => Self::Deploy, - (true, RecordId::ExecutionResult) => Self::ExecutionResultV1, - (true, RecordId::Transfer) => Self::Transfers, - (true, RecordId::FinalizedTransactionApprovals) => Self::FinalizedDeployApprovals, - (false, RecordId::BlockHeader) => Self::BlockHeader, - (false, RecordId::BlockBody) => Self::BlockBody, - (false, RecordId::ApprovalsHashes) => Self::ApprovalsHashes, - (false, RecordId::BlockMetadata) => Self::BlockSignatures, - (false, RecordId::Transaction) => Self::Transaction, - (false, RecordId::ExecutionResult) => Self::ExecutionResult, - (false, RecordId::Transfer) => Self::Transfers, - (false, RecordId::FinalizedTransactionApprovals) => Self::FinalizedApprovals, - } - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self::try_from(rng.gen_range(0..33)).unwrap() - } -} - -impl TryFrom for PayloadType { - type Error = (); - - fn try_from(v: u8) -> Result { - match v { - x if x == PayloadType::BlockHeaderV1 as u8 => Ok(PayloadType::BlockHeaderV1), - x if x == PayloadType::BlockHeader as u8 => Ok(PayloadType::BlockHeader), - x if x == PayloadType::BlockBodyV1 as u8 => Ok(PayloadType::BlockBodyV1), - x if x == PayloadType::BlockBody as u8 => Ok(PayloadType::BlockBody), - x if x == PayloadType::ApprovalsHashesV1 as u8 => Ok(PayloadType::ApprovalsHashesV1), - x if x == PayloadType::ApprovalsHashes as u8 => Ok(PayloadType::ApprovalsHashes), - x if x == PayloadType::BlockSignatures as u8 => Ok(PayloadType::BlockSignatures), - x if x == PayloadType::Deploy as u8 => Ok(PayloadType::Deploy), - x if x == PayloadType::Transaction as u8 => Ok(PayloadType::Transaction), - x if x == PayloadType::ExecutionResultV1 as u8 => Ok(PayloadType::ExecutionResultV1), - x if x == PayloadType::ExecutionResult as u8 => Ok(PayloadType::ExecutionResult), - x if x == PayloadType::Transfers as 
u8 => Ok(PayloadType::Transfers), - x if x == PayloadType::FinalizedDeployApprovals as u8 => { - Ok(PayloadType::FinalizedDeployApprovals) - } - x if x == PayloadType::FinalizedApprovals as u8 => Ok(PayloadType::FinalizedApprovals), - x if x == PayloadType::Peers as u8 => Ok(PayloadType::Peers), - x if x == PayloadType::LastProgress as u8 => Ok(PayloadType::LastProgress), - x if x == PayloadType::ReactorState as u8 => Ok(PayloadType::ReactorState), - x if x == PayloadType::NetworkName as u8 => Ok(PayloadType::NetworkName), - x if x == PayloadType::ConsensusValidatorChanges as u8 => { - Ok(PayloadType::ConsensusValidatorChanges) - } - x if x == PayloadType::BlockSynchronizerStatus as u8 => { - Ok(PayloadType::BlockSynchronizerStatus) - } - x if x == PayloadType::AvailableBlockRange as u8 => { - Ok(PayloadType::AvailableBlockRange) - } - x if x == PayloadType::NextUpgrade as u8 => Ok(PayloadType::NextUpgrade), - x if x == PayloadType::ConsensusStatus as u8 => Ok(PayloadType::ConsensusStatus), - x if x == PayloadType::ChainspecRawBytes as u8 => Ok(PayloadType::ChainspecRawBytes), - x if x == PayloadType::Uptime as u8 => Ok(PayloadType::Uptime), - x if x == PayloadType::HighestBlockSequenceCheckResult as u8 => { - Ok(PayloadType::HighestBlockSequenceCheckResult) - } - x if x == PayloadType::SpeculativeExecutionResult as u8 => { - Ok(PayloadType::SpeculativeExecutionResult) - } - x if x == PayloadType::GlobalStateQueryResult as u8 => { - Ok(PayloadType::GlobalStateQueryResult) - } - x if x == PayloadType::StoredValues as u8 => Ok(PayloadType::StoredValues), - x if x == PayloadType::GetTrieFullResult as u8 => Ok(PayloadType::GetTrieFullResult), - x if x == PayloadType::NodeStatus as u8 => Ok(PayloadType::NodeStatus), - _ => Err(()), - } - } -} - -impl From for u8 { - fn from(value: PayloadType) -> Self { - value as u8 - } -} - -impl fmt::Display for PayloadType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - PayloadType::BlockHeaderV1 => write!(f, "BlockHeaderV1"), - PayloadType::BlockHeader => write!(f, "BlockHeader"), - PayloadType::BlockBodyV1 => write!(f, "BlockBodyV1"), - PayloadType::BlockBody => write!(f, "BlockBody"), - PayloadType::ApprovalsHashesV1 => write!(f, "ApprovalsHashesV1"), - PayloadType::ApprovalsHashes => write!(f, "ApprovalsHashes"), - PayloadType::BlockSignatures => write!(f, "BlockSignatures"), - PayloadType::Deploy => write!(f, "Deploy"), - PayloadType::Transaction => write!(f, "Transaction"), - PayloadType::ExecutionResultV1 => write!(f, "ExecutionResultV1"), - PayloadType::ExecutionResult => write!(f, "ExecutionResult"), - PayloadType::Transfers => write!(f, "Transfers"), - PayloadType::FinalizedDeployApprovals => write!(f, "FinalizedDeployApprovals"), - PayloadType::FinalizedApprovals => write!(f, "FinalizedApprovals"), - PayloadType::SignedBlock => write!(f, "SignedBlock"), - PayloadType::TransactionWithExecutionInfo => write!(f, "TransactionWithExecutionInfo"), - PayloadType::Peers => write!(f, "Peers"), - PayloadType::LastProgress => write!(f, "LastProgress"), - PayloadType::ReactorState => write!(f, "ReactorState"), - PayloadType::NetworkName => write!(f, "NetworkName"), - PayloadType::ConsensusValidatorChanges => write!(f, "ConsensusValidatorChanges"), - PayloadType::BlockSynchronizerStatus => write!(f, "BlockSynchronizerStatus"), - PayloadType::AvailableBlockRange => write!(f, "AvailableBlockRange"), - PayloadType::NextUpgrade => write!(f, "NextUpgrade"), - PayloadType::ConsensusStatus => write!(f, "ConsensusStatus"), - 
PayloadType::ChainspecRawBytes => write!(f, "ChainspecRawBytes"), - PayloadType::Uptime => write!(f, "Uptime"), - PayloadType::HighestBlockSequenceCheckResult => { - write!(f, "HighestBlockSequenceCheckResult") - } - PayloadType::SpeculativeExecutionResult => write!(f, "SpeculativeExecutionResult"), - PayloadType::GlobalStateQueryResult => write!(f, "GlobalStateQueryResult"), - PayloadType::StoredValues => write!(f, "StoredValues"), - PayloadType::GetTrieFullResult => write!(f, "GetTrieFullResult"), - PayloadType::NodeStatus => write!(f, "NodeStatus"), - } - } -} - -const BLOCK_HEADER_V1_TAG: u8 = 0; -const BLOCK_HEADER_TAG: u8 = 1; -const BLOCK_BODY_V1_TAG: u8 = 2; -const BLOCK_BODY_TAG: u8 = 3; -const APPROVALS_HASHES_TAG: u8 = 4; -const APPROVALS_HASHES_V1: u8 = 5; -const BLOCK_SIGNATURES_TAG: u8 = 6; -const DEPLOY_TAG: u8 = 7; -const TRANSACTION_TAG: u8 = 8; -const EXECUTION_RESULT_V1_TAG: u8 = 9; -const EXECUTION_RESULT_TAG: u8 = 10; -const TRANSFERS_TAG: u8 = 11; -const FINALIZED_DEPLOY_APPROVALS_TAG: u8 = 12; -const FINALIZED_APPROVALS_TAG: u8 = 13; -const SIGNED_BLOCK_TAG: u8 = 14; -const TRANSACTION_WITH_EXECUTION_INFO_TAG: u8 = 15; -const PEERS_TAG: u8 = 16; -const UPTIME_TAG: u8 = 17; -const LAST_PROGRESS_TAG: u8 = 18; -const REACTOR_STATE_TAG: u8 = 19; -const NETWORK_NAME_TAG: u8 = 20; -const CONSENSUS_VALIDATOR_CHANGES_TAG: u8 = 21; -const BLOCK_SYNCHRONIZER_STATUS_TAG: u8 = 22; -const AVAILABLE_BLOCK_RANGE_TAG: u8 = 23; -const NEXT_UPGRADE_TAG: u8 = 24; -const CONSENSUS_STATUS_TAG: u8 = 25; -const CHAINSPEC_RAW_BYTES_TAG: u8 = 26; -const HIGHEST_BLOCK_SEQUENCE_CHECK_RESULT_TAG: u8 = 27; -const SPECULATIVE_EXECUTION_RESULT_TAG: u8 = 28; -const GLOBAL_STATE_QUERY_RESULT_TAG: u8 = 29; -const STORED_VALUES_TAG: u8 = 30; -const GET_TRIE_FULL_RESULT_TAG: u8 = 31; -const NODE_STATUS_TAG: u8 = 32; - -impl ToBytes for PayloadType { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PayloadType::BlockHeaderV1 => BLOCK_HEADER_V1_TAG, - PayloadType::BlockHeader => BLOCK_HEADER_TAG, - PayloadType::BlockBodyV1 => BLOCK_BODY_V1_TAG, - PayloadType::BlockBody => BLOCK_BODY_TAG, - PayloadType::ApprovalsHashes => APPROVALS_HASHES_TAG, - PayloadType::ApprovalsHashesV1 => APPROVALS_HASHES_V1, - PayloadType::BlockSignatures => BLOCK_SIGNATURES_TAG, - PayloadType::Deploy => DEPLOY_TAG, - PayloadType::Transaction => TRANSACTION_TAG, - PayloadType::ExecutionResultV1 => EXECUTION_RESULT_V1_TAG, - PayloadType::ExecutionResult => EXECUTION_RESULT_TAG, - PayloadType::Transfers => TRANSFERS_TAG, - PayloadType::FinalizedDeployApprovals => FINALIZED_DEPLOY_APPROVALS_TAG, - PayloadType::FinalizedApprovals => FINALIZED_APPROVALS_TAG, - PayloadType::Peers => PEERS_TAG, - PayloadType::SignedBlock => SIGNED_BLOCK_TAG, - PayloadType::TransactionWithExecutionInfo => TRANSACTION_WITH_EXECUTION_INFO_TAG, - PayloadType::LastProgress => LAST_PROGRESS_TAG, - PayloadType::ReactorState => REACTOR_STATE_TAG, - PayloadType::NetworkName => NETWORK_NAME_TAG, - PayloadType::ConsensusValidatorChanges => CONSENSUS_VALIDATOR_CHANGES_TAG, - PayloadType::BlockSynchronizerStatus => BLOCK_SYNCHRONIZER_STATUS_TAG, - PayloadType::AvailableBlockRange => AVAILABLE_BLOCK_RANGE_TAG, - PayloadType::NextUpgrade => NEXT_UPGRADE_TAG, - PayloadType::ConsensusStatus => CONSENSUS_STATUS_TAG, - PayloadType::ChainspecRawBytes => 
CHAINSPEC_RAW_BYTES_TAG, - PayloadType::Uptime => UPTIME_TAG, - PayloadType::HighestBlockSequenceCheckResult => HIGHEST_BLOCK_SEQUENCE_CHECK_RESULT_TAG, - PayloadType::SpeculativeExecutionResult => SPECULATIVE_EXECUTION_RESULT_TAG, - PayloadType::GlobalStateQueryResult => GLOBAL_STATE_QUERY_RESULT_TAG, - PayloadType::StoredValues => STORED_VALUES_TAG, - PayloadType::GetTrieFullResult => GET_TRIE_FULL_RESULT_TAG, - PayloadType::NodeStatus => NODE_STATUS_TAG, - } - .write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for PayloadType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = FromBytes::from_bytes(bytes)?; - let record_id = match tag { - BLOCK_HEADER_V1_TAG => PayloadType::BlockHeaderV1, - BLOCK_HEADER_TAG => PayloadType::BlockHeader, - BLOCK_BODY_V1_TAG => PayloadType::BlockBodyV1, - BLOCK_BODY_TAG => PayloadType::BlockBody, - APPROVALS_HASHES_TAG => PayloadType::ApprovalsHashes, - APPROVALS_HASHES_V1 => PayloadType::ApprovalsHashesV1, - BLOCK_SIGNATURES_TAG => PayloadType::BlockSignatures, - DEPLOY_TAG => PayloadType::Deploy, - TRANSACTION_TAG => PayloadType::Transaction, - EXECUTION_RESULT_V1_TAG => PayloadType::ExecutionResultV1, - EXECUTION_RESULT_TAG => PayloadType::ExecutionResult, - TRANSFERS_TAG => PayloadType::Transfers, - FINALIZED_DEPLOY_APPROVALS_TAG => PayloadType::FinalizedDeployApprovals, - FINALIZED_APPROVALS_TAG => PayloadType::FinalizedApprovals, - PEERS_TAG => PayloadType::Peers, - SIGNED_BLOCK_TAG => PayloadType::SignedBlock, - TRANSACTION_WITH_EXECUTION_INFO_TAG => PayloadType::TransactionWithExecutionInfo, - LAST_PROGRESS_TAG => PayloadType::LastProgress, - REACTOR_STATE_TAG => PayloadType::ReactorState, - NETWORK_NAME_TAG => PayloadType::NetworkName, - CONSENSUS_VALIDATOR_CHANGES_TAG => PayloadType::ConsensusValidatorChanges, - BLOCK_SYNCHRONIZER_STATUS_TAG => PayloadType::BlockSynchronizerStatus, - AVAILABLE_BLOCK_RANGE_TAG => PayloadType::AvailableBlockRange, - NEXT_UPGRADE_TAG => PayloadType::NextUpgrade, - CONSENSUS_STATUS_TAG => PayloadType::ConsensusStatus, - CHAINSPEC_RAW_BYTES_TAG => PayloadType::ChainspecRawBytes, - UPTIME_TAG => PayloadType::Uptime, - HIGHEST_BLOCK_SEQUENCE_CHECK_RESULT_TAG => PayloadType::HighestBlockSequenceCheckResult, - SPECULATIVE_EXECUTION_RESULT_TAG => PayloadType::SpeculativeExecutionResult, - GLOBAL_STATE_QUERY_RESULT_TAG => PayloadType::GlobalStateQueryResult, - STORED_VALUES_TAG => PayloadType::StoredValues, - GET_TRIE_FULL_RESULT_TAG => PayloadType::GetTrieFullResult, - NODE_STATUS_TAG => PayloadType::NodeStatus, - _ => return Err(bytesrepr::Error::Formatting), - }; - Ok((record_id, remainder)) - } -} - -/// Represents an entity that can be sent as a payload. -pub trait PayloadEntity { - /// Returns the payload type of the entity. 
- const PAYLOAD_TYPE: PayloadType; -} - -impl PayloadEntity for Transaction { - const PAYLOAD_TYPE: PayloadType = PayloadType::Transaction; -} - -impl PayloadEntity for Deploy { - const PAYLOAD_TYPE: PayloadType = PayloadType::Deploy; -} - -impl PayloadEntity for BlockHeader { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockHeader; -} - -impl PayloadEntity for BlockHeaderV1 { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockHeaderV1; -} - -impl PayloadEntity for BlockBody { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockBody; -} - -impl PayloadEntity for BlockBodyV1 { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockBodyV1; -} - -impl PayloadEntity for ExecutionResult { - const PAYLOAD_TYPE: PayloadType = PayloadType::ExecutionResult; -} - -impl PayloadEntity for FinalizedApprovals { - const PAYLOAD_TYPE: PayloadType = PayloadType::FinalizedApprovals; -} - -impl PayloadEntity for FinalizedDeployApprovals { - const PAYLOAD_TYPE: PayloadType = PayloadType::FinalizedDeployApprovals; -} - -impl PayloadEntity for ExecutionResultV1 { - const PAYLOAD_TYPE: PayloadType = PayloadType::ExecutionResultV1; -} - -impl PayloadEntity for SignedBlock { - const PAYLOAD_TYPE: PayloadType = PayloadType::SignedBlock; -} - -impl PayloadEntity for TransactionWithExecutionInfo { - const PAYLOAD_TYPE: PayloadType = PayloadType::TransactionWithExecutionInfo; -} - -impl PayloadEntity for Peers { - const PAYLOAD_TYPE: PayloadType = PayloadType::Peers; -} - -impl PayloadEntity for BlockSignatures { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockSignatures; -} - -impl PayloadEntity for Vec { - const PAYLOAD_TYPE: PayloadType = PayloadType::Transfers; -} - -impl PayloadEntity for AvailableBlockRange { - const PAYLOAD_TYPE: PayloadType = PayloadType::AvailableBlockRange; -} - -#[cfg(any(feature = "std", test))] -impl PayloadEntity for ChainspecRawBytes { - const PAYLOAD_TYPE: PayloadType = PayloadType::ChainspecRawBytes; -} - -impl PayloadEntity for ConsensusValidatorChanges { - const PAYLOAD_TYPE: PayloadType = PayloadType::ConsensusValidatorChanges; -} - -impl PayloadEntity for GlobalStateQueryResult { - const PAYLOAD_TYPE: PayloadType = PayloadType::GlobalStateQueryResult; -} - -impl PayloadEntity for Vec { - const PAYLOAD_TYPE: PayloadType = PayloadType::StoredValues; -} - -impl PayloadEntity for GetTrieFullResult { - const PAYLOAD_TYPE: PayloadType = PayloadType::GetTrieFullResult; -} - -impl PayloadEntity for SpeculativeExecutionResult { - const PAYLOAD_TYPE: PayloadType = PayloadType::SpeculativeExecutionResult; -} - -#[cfg(any(feature = "std", test))] -impl PayloadEntity for NodeStatus { - const PAYLOAD_TYPE: PayloadType = PayloadType::NodeStatus; -} - -#[cfg(any(feature = "std", test))] -impl PayloadEntity for NextUpgrade { - const PAYLOAD_TYPE: PayloadType = PayloadType::NextUpgrade; -} - -impl PayloadEntity for Uptime { - const PAYLOAD_TYPE: PayloadType = PayloadType::Uptime; -} - -impl PayloadEntity for LastProgress { - const PAYLOAD_TYPE: PayloadType = PayloadType::LastProgress; -} - -impl PayloadEntity for ReactorState { - const PAYLOAD_TYPE: PayloadType = PayloadType::ReactorState; -} - -impl PayloadEntity for NetworkName { - const PAYLOAD_TYPE: PayloadType = PayloadType::NetworkName; -} - -impl PayloadEntity for BlockSynchronizerStatus { - const PAYLOAD_TYPE: PayloadType = PayloadType::BlockSynchronizerStatus; -} - -impl PayloadEntity for ConsensusStatus { - const PAYLOAD_TYPE: PayloadType = PayloadType::ConsensusStatus; -} - -#[cfg(test)] -mod tests { - use super::*; - use 
crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = PayloadType::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/record_id.rs b/casper_types_ver_2_0/src/binary_port/record_id.rs deleted file mode 100644 index f7ef6dfe..00000000 --- a/casper_types_ver_2_0/src/binary_port/record_id.rs +++ /dev/null @@ -1,105 +0,0 @@ -use core::convert::TryFrom; - -#[cfg(test)] -use rand::Rng; -use serde::Serialize; - -#[cfg(test)] -use crate::testing::TestRng; - -/// An identifier of a record type. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)] -#[repr(u16)] -pub enum RecordId { - /// Refers to `BlockHeader` record. - BlockHeader = 0, - /// Refers to `BlockBody` record. - BlockBody = 1, - /// Refers to `ApprovalsHashes` record. - ApprovalsHashes = 2, - /// Refers to `BlockMetadata` record. - BlockMetadata = 3, - /// Refers to `Transaction` record. - Transaction = 4, - /// Refers to `ExecutionResult` record. - ExecutionResult = 5, - /// Refers to `Transfer` record. - Transfer = 6, - /// Refers to `FinalizedTransactionApprovals` record. - FinalizedTransactionApprovals = 7, -} - -impl RecordId { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..8) { - 0 => RecordId::BlockHeader, - 1 => RecordId::BlockBody, - 2 => RecordId::ApprovalsHashes, - 3 => RecordId::BlockMetadata, - 4 => RecordId::Transaction, - 5 => RecordId::ExecutionResult, - 6 => RecordId::Transfer, - 7 => RecordId::FinalizedTransactionApprovals, - _ => unreachable!(), - } - } -} - -impl TryFrom for RecordId { - type Error = UnknownRecordId; - - fn try_from(value: u16) -> Result { - match value { - 0 => Ok(RecordId::BlockHeader), - 1 => Ok(RecordId::BlockBody), - 2 => Ok(RecordId::ApprovalsHashes), - 3 => Ok(RecordId::BlockMetadata), - 4 => Ok(RecordId::Transaction), - 5 => Ok(RecordId::ExecutionResult), - 6 => Ok(RecordId::Transfer), - 7 => Ok(RecordId::FinalizedTransactionApprovals), - _ => Err(UnknownRecordId(value)), - } - } -} - -impl From for u16 { - fn from(value: RecordId) -> Self { - value as u16 - } -} - -impl core::fmt::Display for RecordId { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - RecordId::BlockHeader => write!(f, "BlockHeader"), - RecordId::BlockBody => write!(f, "BlockBody"), - RecordId::ApprovalsHashes => write!(f, "ApprovalsHashes"), - RecordId::BlockMetadata => write!(f, "BlockMetadata"), - RecordId::Transaction => write!(f, "Transaction"), - RecordId::ExecutionResult => write!(f, "ExecutionResult"), - RecordId::Transfer => write!(f, "Transfer"), - RecordId::FinalizedTransactionApprovals => write!(f, "FinalizedTransactionApprovals"), - } - } -} - -/// Error returned when trying to convert a `u16` into a `RecordId`. 
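// --- Hedged usage sketch (editor's addition, not part of the diff) ---
// Shows how the numeric identifiers defined above are expected to round-trip.
// Assumptions: `RecordId` and `PayloadType` are re-exported from
// `casper_types_ver_2_0::binary_port`; call this e.g. from a #[test].
use core::convert::TryFrom;

use casper_types_ver_2_0::binary_port::{PayloadType, RecordId};

fn numeric_ids_round_trip() {
    // `RecordId` is keyed by a `u16`; values outside 0..=7 are rejected with `UnknownRecordId`.
    let record = RecordId::try_from(3u16).expect("3 maps to RecordId::BlockMetadata");
    assert_eq!(record, RecordId::BlockMetadata);
    assert_eq!(u16::from(record), 3);
    assert!(RecordId::try_from(42u16).is_err());

    // `PayloadType` is keyed by a `u8`; unknown discriminants fail with `()` as the error.
    let discriminant = u8::from(PayloadType::Peers);
    let payload = PayloadType::try_from(discriminant).expect("known discriminant");
    assert_eq!(u8::from(payload), discriminant);
}
// --- end sketch ---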
-#[derive(Debug, PartialEq, Eq)] -pub struct UnknownRecordId(u16); - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn tag_roundtrip() { - let rng = &mut TestRng::new(); - - let val = RecordId::random(rng); - let tag = u16::from(val); - assert_eq!(RecordId::try_from(tag), Ok(val)); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/state_request.rs b/casper_types_ver_2_0/src/binary_port/state_request.rs deleted file mode 100644 index fddb86dc..00000000 --- a/casper_types_ver_2_0/src/binary_port/state_request.rs +++ /dev/null @@ -1,186 +0,0 @@ -use alloc::string::String; -use alloc::vec::Vec; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - Digest, GlobalStateIdentifier, Key, KeyTag, -}; - -const ITEM_TAG: u8 = 0; -const ALL_ITEMS_TAG: u8 = 1; -const TRIE_TAG: u8 = 2; - -/// A request to get data from the global state. -#[derive(Clone, Debug, PartialEq)] -pub enum GlobalStateRequest { - /// Gets an item from the global state. - Item { - /// Global state identifier, `None` means "latest block state". - state_identifier: Option, - /// Key under which data is stored. - base_key: Key, - /// Path under which the value is stored. - path: Vec, - }, - /// Get all items under the given key tag. - AllItems { - /// Global state identifier, `None` means "latest block state". - state_identifier: Option, - /// Key tag - key_tag: KeyTag, - }, - /// Get a trie by its Digest. - Trie { - /// A trie key. - trie_key: Digest, - }, -} - -impl GlobalStateRequest { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - 0 => { - let path_count = rng.gen_range(10..20); - let state_identifier = if rng.gen() { - Some(GlobalStateIdentifier::random(rng)) - } else { - None - }; - GlobalStateRequest::Item { - state_identifier, - base_key: rng.gen(), - path: std::iter::repeat_with(|| rng.random_string(32..64)) - .take(path_count) - .collect(), - } - } - 1 => { - let state_identifier = if rng.gen() { - Some(GlobalStateIdentifier::random(rng)) - } else { - None - }; - GlobalStateRequest::AllItems { - state_identifier, - key_tag: KeyTag::random(rng), - } - } - 2 => GlobalStateRequest::Trie { - trie_key: Digest::random(rng), - }, - _ => unreachable!(), - } - } -} - -impl ToBytes for GlobalStateRequest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - GlobalStateRequest::Item { - state_identifier, - base_key, - path, - } => { - ITEM_TAG.write_bytes(writer)?; - state_identifier.write_bytes(writer)?; - base_key.write_bytes(writer)?; - path.write_bytes(writer) - } - GlobalStateRequest::AllItems { - state_identifier, - key_tag, - } => { - ALL_ITEMS_TAG.write_bytes(writer)?; - state_identifier.write_bytes(writer)?; - key_tag.write_bytes(writer) - } - GlobalStateRequest::Trie { trie_key } => { - TRIE_TAG.write_bytes(writer)?; - trie_key.write_bytes(writer) - } - } - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - GlobalStateRequest::Item { - state_identifier, - base_key, - path, - } => { - state_identifier.serialized_length() - + base_key.serialized_length() - + path.serialized_length() - } - GlobalStateRequest::AllItems { - state_identifier, - key_tag, - } => state_identifier.serialized_length() + 
key_tag.serialized_length(), - GlobalStateRequest::Trie { trie_key } => trie_key.serialized_length(), - } - } -} - -impl FromBytes for GlobalStateRequest { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - ITEM_TAG => { - let (state_identifier, remainder) = FromBytes::from_bytes(remainder)?; - let (base_key, remainder) = FromBytes::from_bytes(remainder)?; - let (path, remainder) = FromBytes::from_bytes(remainder)?; - Ok(( - GlobalStateRequest::Item { - state_identifier, - base_key, - path, - }, - remainder, - )) - } - ALL_ITEMS_TAG => { - let (state_identifier, remainder) = FromBytes::from_bytes(remainder)?; - let (key_tag, remainder) = FromBytes::from_bytes(remainder)?; - Ok(( - GlobalStateRequest::AllItems { - state_identifier, - key_tag, - }, - remainder, - )) - } - TRIE_TAG => { - let (trie_key, remainder) = Digest::from_bytes(remainder)?; - Ok((GlobalStateRequest::Trie { trie_key }, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = GlobalStateRequest::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/binary_port/type_wrappers.rs b/casper_types_ver_2_0/src/binary_port/type_wrappers.rs deleted file mode 100644 index cd4f92fc..00000000 --- a/casper_types_ver_2_0/src/binary_port/type_wrappers.rs +++ /dev/null @@ -1,349 +0,0 @@ -use core::{convert::TryFrom, num::TryFromIntError, time::Duration}; - -use alloc::{ - collections::BTreeMap, - string::{String, ToString}, - vec::Vec, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; - -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - contract_messages::Messages, - execution::ExecutionResultV2, - EraId, ExecutionInfo, PublicKey, TimeDiff, Timestamp, Transaction, ValidatorChange, -}; - -// `bytesrepr` implementations for type wrappers are repetitive, hence this macro helper. We should -// get rid of this after we introduce the proper "bytesrepr-derive" proc macro. -macro_rules! impl_bytesrepr_for_type_wrapper { - ($t:ident) => { - impl ToBytes for $t { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - } - - impl FromBytes for $t { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (inner, remainder) = FromBytes::from_bytes(bytes)?; - Ok(($t(inner), remainder)) - } - } - }; -} - -/// Type representing uptime. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct Uptime(u64); - -impl Uptime { - /// Constructs new uptime. - pub fn new(value: u64) -> Self { - Self(value) - } - - /// Retrieve the inner value. - pub fn into_inner(self) -> u64 { - self.0 - } -} - -impl From for Duration { - fn from(uptime: Uptime) -> Self { - Duration::from_secs(uptime.0) - } -} - -impl TryFrom for TimeDiff { - type Error = TryFromIntError; - - fn try_from(uptime: Uptime) -> Result { - u32::try_from(uptime.0).map(TimeDiff::from_seconds) - } -} - -/// Type representing changes in consensus validators. 
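// --- Hedged usage sketch (editor's addition, not part of the diff) ---
// Builds the simplest `GlobalStateRequest` variant and round-trips it through `bytesrepr`.
// Assumptions: `GlobalStateRequest` is exported from `casper_types_ver_2_0::binary_port`
// and `Digest` from the crate root, as the deleted module layout suggests.
use casper_types_ver_2_0::{
    binary_port::GlobalStateRequest,
    bytesrepr::{self, FromBytes, ToBytes},
    Digest,
};

fn trie_request_round_trip() -> Result<(), bytesrepr::Error> {
    let request = GlobalStateRequest::Trie {
        trie_key: Digest::hash(b"example trie key"),
    };
    // Serialization starts with the variant tag (TRIE_TAG = 2) followed by the digest bytes.
    let bytes = request.to_bytes()?;
    let (decoded, remainder) = GlobalStateRequest::from_bytes(&bytes)?;
    assert!(remainder.is_empty());
    assert_eq!(decoded, request); // `GlobalStateRequest` derives `PartialEq`.
    Ok(())
}
// --- end sketch ---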
-#[derive(Debug, PartialEq, Eq)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ConsensusValidatorChanges(BTreeMap>); - -impl ConsensusValidatorChanges { - /// Constructs new consensus validator changes. - pub fn new(value: BTreeMap>) -> Self { - Self(value) - } - - /// Retrieve the inner value. - pub fn into_inner(self) -> BTreeMap> { - self.0 - } -} - -impl From for BTreeMap> { - fn from(consensus_validator_changes: ConsensusValidatorChanges) -> Self { - consensus_validator_changes.0 - } -} - -/// Type representing network name. -#[derive(Debug, PartialEq, Eq)] -pub struct NetworkName(String); - -impl NetworkName { - /// Constructs new network name. - pub fn new(value: impl ToString) -> Self { - Self(value.to_string()) - } - - /// Retrieve the inner value. - pub fn into_inner(self) -> String { - self.0 - } -} - -impl From for String { - fn from(network_name: NetworkName) -> Self { - network_name.0 - } -} - -/// Type representing last progress of the sync process. -#[derive(Debug, PartialEq, Eq)] -pub struct LastProgress(Timestamp); - -impl LastProgress { - /// Constructs new last progress. - pub fn new(value: Timestamp) -> Self { - Self(value) - } - - /// Retrieve the inner value. - pub fn into_inner(self) -> Timestamp { - self.0 - } -} - -impl From for Timestamp { - fn from(last_progress: LastProgress) -> Self { - last_progress.0 - } -} - -/// Type representing results of the speculative execution. -#[derive(Debug, PartialEq, Eq)] -pub struct SpeculativeExecutionResult(Option<(ExecutionResultV2, Messages)>); - -impl SpeculativeExecutionResult { - /// Constructs new speculative execution result. - pub fn new(value: Option<(ExecutionResultV2, Messages)>) -> Self { - Self(value) - } - - /// Returns the inner value. - pub fn into_inner(self) -> Option<(ExecutionResultV2, Messages)> { - self.0 - } -} - -/// Type representing results of the get full trie request. -#[derive(Debug, PartialEq, Eq)] -pub struct GetTrieFullResult(Option); - -impl GetTrieFullResult { - /// Constructs new get trie result. - pub fn new(value: Option) -> Self { - Self(value) - } - - /// Returns the inner value. - pub fn into_inner(self) -> Option { - self.0 - } -} - -/// Describes the consensus status. -#[derive(Debug, PartialEq, Eq)] -pub struct ConsensusStatus { - validator_public_key: PublicKey, - round_length: Option, -} - -impl ConsensusStatus { - /// Constructs new consensus status. - pub fn new(validator_public_key: PublicKey, round_length: Option) -> Self { - Self { - validator_public_key, - round_length, - } - } - - /// Returns the validator public key. - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Returns the round length. 
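// --- Hedged usage sketch (editor's addition, not part of the diff) ---
// Exercises the `Uptime` conversions defined in this wrapper module.
// Assumption: the wrapper is re-exported from `casper_types_ver_2_0::binary_port`.
use core::{convert::TryFrom, time::Duration};

use casper_types_ver_2_0::{binary_port::Uptime, TimeDiff};

fn uptime_conversions() {
    let uptime = Uptime::new(3_600);
    assert_eq!(Duration::from(uptime), Duration::from_secs(3_600));

    // The `TimeDiff` conversion is fallible: the inner value is a `u64` number of seconds,
    // while `TimeDiff::from_seconds` takes a `u32`.
    let diff = TimeDiff::try_from(uptime).expect("3600 fits in a u32");
    assert_eq!(diff, TimeDiff::from_seconds(3_600));
}
// --- end sketch ---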
- pub fn round_length(&self) -> Option { - self.round_length - } -} - -impl ToBytes for ConsensusStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.validator_public_key.serialized_length() + self.round_length.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.validator_public_key.write_bytes(writer)?; - self.round_length.write_bytes(writer) - } -} - -impl FromBytes for ConsensusStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validator_public_key, remainder) = FromBytes::from_bytes(bytes)?; - let (round_length, remainder) = FromBytes::from_bytes(remainder)?; - Ok(( - ConsensusStatus::new(validator_public_key, round_length), - remainder, - )) - } -} - -/// A transaction with execution info. -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct TransactionWithExecutionInfo { - transaction: Transaction, - execution_info: Option, -} - -impl TransactionWithExecutionInfo { - /// Constructs new transaction with execution info. - pub fn new(transaction: Transaction, execution_info: Option) -> Self { - Self { - transaction, - execution_info, - } - } - - /// Converts `self` into the transaction and execution info. - pub fn into_inner(self) -> (Transaction, Option) { - (self.transaction, self.execution_info) - } -} - -impl ToBytes for TransactionWithExecutionInfo { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.transaction.write_bytes(writer)?; - self.execution_info.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.transaction.serialized_length() + self.execution_info.serialized_length() - } -} - -impl FromBytes for TransactionWithExecutionInfo { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (transaction, remainder) = FromBytes::from_bytes(bytes)?; - let (execution_info, remainder) = FromBytes::from_bytes(remainder)?; - Ok(( - TransactionWithExecutionInfo::new(transaction, execution_info), - remainder, - )) - } -} - -impl_bytesrepr_for_type_wrapper!(Uptime); -impl_bytesrepr_for_type_wrapper!(ConsensusValidatorChanges); -impl_bytesrepr_for_type_wrapper!(NetworkName); -impl_bytesrepr_for_type_wrapper!(LastProgress); -impl_bytesrepr_for_type_wrapper!(SpeculativeExecutionResult); -impl_bytesrepr_for_type_wrapper!(GetTrieFullResult); - -#[cfg(test)] -mod tests { - use core::iter::FromIterator; - use rand::Rng; - - use super::*; - use crate::testing::TestRng; - - #[test] - fn uptime_roundtrip() { - let rng = &mut TestRng::new(); - bytesrepr::test_serialization_roundtrip(&Uptime::new(rng.gen())); - } - - #[test] - fn consensus_validator_changes_roundtrip() { - let rng = &mut TestRng::new(); - let map = BTreeMap::from_iter([( - PublicKey::random(rng), - vec![(EraId::random(rng), ValidatorChange::random(rng))], - )]); - bytesrepr::test_serialization_roundtrip(&ConsensusValidatorChanges::new(map)); - } - - #[test] - fn network_name_roundtrip() { - let rng = &mut TestRng::new(); - bytesrepr::test_serialization_roundtrip(&NetworkName::new(rng.random_string(5..20))); - } - - #[test] - fn last_progress_roundtrip() { - let rng = &mut TestRng::new(); - 
bytesrepr::test_serialization_roundtrip(&LastProgress::new(Timestamp::random(rng))); - } - - #[test] - fn speculative_execution_result_roundtrip() { - let rng = &mut TestRng::new(); - if rng.gen_bool(0.5) { - bytesrepr::test_serialization_roundtrip(&SpeculativeExecutionResult::new(None)); - } else { - bytesrepr::test_serialization_roundtrip(&SpeculativeExecutionResult::new(Some(( - ExecutionResultV2::random(rng), - rng.random_vec(0..20), - )))); - } - } - - #[test] - fn get_trie_full_result_roundtrip() { - let rng = &mut TestRng::new(); - bytesrepr::test_serialization_roundtrip(&GetTrieFullResult::new(rng.gen())); - } - - #[test] - fn consensus_status_roundtrip() { - let rng = &mut TestRng::new(); - bytesrepr::test_serialization_roundtrip(&ConsensusStatus::new( - PublicKey::random(rng), - Some(TimeDiff::from_millis(rng.gen())), - )); - } -} diff --git a/casper_types_ver_2_0/src/block.rs b/casper_types_ver_2_0/src/block.rs deleted file mode 100644 index 1e84169d..00000000 --- a/casper_types_ver_2_0/src/block.rs +++ /dev/null @@ -1,494 +0,0 @@ -mod available_block_range; -mod block_body; -mod block_hash; -mod block_hash_and_height; -mod block_header; -mod block_identifier; -mod block_signatures; -mod block_sync_status; -mod block_v1; -mod block_v2; -mod era_end; -mod finality_signature; -mod finality_signature_id; -mod json_compatibility; -mod rewarded_signatures; -mod rewards; -mod signed_block; -mod signed_block_header; - -#[cfg(any(feature = "testing", test))] -mod test_block_builder { - pub mod test_block_v1_builder; - pub mod test_block_v2_builder; -} - -use alloc::{boxed::Box, vec::Vec}; -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; - -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -use crate::{ - bytesrepr, - bytesrepr::{FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - Digest, EraId, ProtocolVersion, PublicKey, Timestamp, -}; -pub use available_block_range::AvailableBlockRange; -pub use block_body::{BlockBody, BlockBodyV1, BlockBodyV2}; -pub use block_hash::BlockHash; -pub use block_hash_and_height::BlockHashAndHeight; -pub use block_header::{BlockHeader, BlockHeaderV1, BlockHeaderV2}; -pub use block_identifier::BlockIdentifier; -pub use block_signatures::{BlockSignatures, BlockSignaturesMergeError}; -pub use block_sync_status::{BlockSyncStatus, BlockSynchronizerStatus}; -pub use block_v1::BlockV1; -pub use block_v2::BlockV2; -pub use era_end::{EraEnd, EraEndV1, EraEndV2, EraReport}; -pub use finality_signature::FinalitySignature; -pub use finality_signature_id::FinalitySignatureId; -#[cfg(all(feature = "std", feature = "json-schema"))] -pub use json_compatibility::JsonBlockWithSignatures; -pub use rewarded_signatures::{RewardedSignatures, SingleBlockRewardedSignatures}; -pub use rewards::Rewards; -pub use signed_block::SignedBlock; -pub use signed_block_header::{SignedBlockHeader, SignedBlockHeaderValidationError}; -#[cfg(any(feature = "testing", test))] -pub use test_block_builder::{ - test_block_v1_builder::TestBlockV1Builder, - test_block_v2_builder::TestBlockV2Builder as TestBlockBuilder, -}; - -#[cfg(feature = "json-schema")] -static BLOCK: Lazy = Lazy::new(|| BlockV2::example().into()); - -/// An error that can arise when validating a block's cryptographic integrity using its hashes. 
-#[derive(Clone, Eq, PartialEq, Debug)] -#[cfg_attr(any(feature = "std", test), derive(serde::Serialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum BlockValidationError { - /// Problem serializing some of a block's data into bytes. - Bytesrepr(bytesrepr::Error), - /// The provided block's hash is not the same as the actual hash of the block. - UnexpectedBlockHash { - /// The block with the incorrect block hash. - block: Box, - /// The actual hash of the block. - actual_block_hash: BlockHash, - }, - /// The body hash in the header is not the same as the actual hash of the body of the block. - UnexpectedBodyHash { - /// The block with the header containing the incorrect block body hash. - block: Box, - /// The actual hash of the block's body. - actual_block_body_hash: Digest, - }, - /// The header version does not match the body version. - IncompatibleVersions, -} - -impl Display for BlockValidationError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - BlockValidationError::Bytesrepr(error) => { - write!(formatter, "error validating block: {}", error) - } - BlockValidationError::UnexpectedBlockHash { - block, - actual_block_hash, - } => { - write!( - formatter, - "block has incorrect block hash - actual block hash: {:?}, block: {:?}", - actual_block_hash, block - ) - } - BlockValidationError::UnexpectedBodyHash { - block, - actual_block_body_hash, - } => { - write!( - formatter, - "block header has incorrect body hash - actual body hash: {:?}, block: {:?}", - actual_block_body_hash, block - ) - } - BlockValidationError::IncompatibleVersions => { - write!(formatter, "block body and header versions do not match") - } - } - } -} - -impl From for BlockValidationError { - fn from(error: bytesrepr::Error) -> Self { - BlockValidationError::Bytesrepr(error) - } -} - -#[cfg(feature = "std")] -impl StdError for BlockValidationError { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - BlockValidationError::Bytesrepr(error) => Some(error), - BlockValidationError::UnexpectedBlockHash { .. } - | BlockValidationError::UnexpectedBodyHash { .. } - | BlockValidationError::IncompatibleVersions => None, - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub enum BlockConversionError { - DifferentVersion { expected_version: u8 }, -} - -#[cfg(feature = "std")] -impl Display for BlockConversionError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - BlockConversionError::DifferentVersion { expected_version } => { - write!( - f, - "Could not convert a block to the expected version {}", - expected_version - ) - } - } - } -} - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for block body v1. -const BLOCK_V1_TAG: u8 = 0; -/// Tag for block body v2. -const BLOCK_V2_TAG: u8 = 1; - -/// A block after execution. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - any(feature = "std", feature = "json-schema", test), - derive(serde::Serialize, serde::Deserialize) -)] -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum Block { - /// The legacy, initial version of the block. - #[cfg_attr( - any(feature = "std", feature = "json-schema", test), - serde(rename = "Version1") - )] - V1(BlockV1), - /// The version 2 of the block. 
- #[cfg_attr( - any(feature = "std", feature = "json-schema", test), - serde(rename = "Version2") - )] - V2(BlockV2), -} - -impl Block { - // This method is not intended to be used by third party crates. - #[doc(hidden)] - pub fn new_from_header_and_body( - block_header: BlockHeader, - block_body: BlockBody, - ) -> Result> { - let hash = block_header.block_hash(); - let block = match (block_body, block_header) { - (BlockBody::V1(body), BlockHeader::V1(header)) => { - Ok(Block::V1(BlockV1 { hash, header, body })) - } - (BlockBody::V2(body), BlockHeader::V2(header)) => { - Ok(Block::V2(BlockV2 { hash, header, body })) - } - _ => Err(BlockValidationError::IncompatibleVersions), - }?; - - block.verify()?; - Ok(block) - } - - /// Clones the header, put it in the versioning enum, and returns it. - pub fn clone_header(&self) -> BlockHeader { - match self { - Block::V1(v1) => BlockHeader::V1(v1.header().clone()), - Block::V2(v2) => BlockHeader::V2(v2.header().clone()), - } - } - - /// Returns the block's header, consuming `self`. - pub fn take_header(self) -> BlockHeader { - match self { - Block::V1(v1) => BlockHeader::V1(v1.take_header()), - Block::V2(v2) => BlockHeader::V2(v2.take_header()), - } - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - match self { - Block::V1(v1) => v1.header.timestamp(), - Block::V2(v2) => v2.header.timestamp(), - } - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - match self { - Block::V1(v1) => v1.header.protocol_version(), - Block::V2(v2) => v2.header.protocol_version(), - } - } - - /// The hash of this block's header. - pub fn hash(&self) -> &BlockHash { - match self { - Block::V1(v1) => v1.hash(), - Block::V2(v2) => v2.hash(), - } - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - match self { - Block::V1(v1) => v1.header().body_hash(), - Block::V2(v2) => v2.header().body_hash(), - } - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - match self { - Block::V1(v1) => v1.header().random_bit(), - Block::V2(v2) => v2.header().random_bit(), - } - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - match self { - Block::V1(v1) => v1.accumulated_seed(), - Block::V2(v2) => v2.accumulated_seed(), - } - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - match self { - Block::V1(v1) => v1.parent_hash(), - Block::V2(v2) => v2.parent_hash(), - } - } - - /// Returns the public key of the validator which proposed the block. - pub fn proposer(&self) -> &PublicKey { - match self { - Block::V1(v1) => v1.proposer(), - Block::V2(v2) => v2.proposer(), - } - } - - /// Clone the body and wrap is up in the versioned `Body`. - pub fn clone_body(&self) -> BlockBody { - match self { - Block::V1(v1) => BlockBody::V1(v1.body().clone()), - Block::V2(v2) => BlockBody::V2(v2.body().clone()), - } - } - - /// Check the integrity of a block by hashing its body and header - pub fn verify(&self) -> Result<(), BlockValidationError> { - match self { - Block::V1(v1) => v1.verify(), - Block::V2(v2) => v2.verify(), - } - } - - /// Returns the height of this block, i.e. the number of ancestors. 
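// --- Hedged usage sketch (editor's addition, not part of the diff) ---
// Inspects a versioned `Block` through the accessors above, which hide the V1/V2 split.
// Assumptions: the crate is built with the "testing" feature so `TestBlockBuilder` and
// `TestRng` are available, and `Block` is re-exported from the crate root.
use casper_types_ver_2_0::{testing::TestRng, Block, TestBlockBuilder};

fn inspect_block() {
    let rng = &mut TestRng::new();
    let block = Block::V2(TestBlockBuilder::new().build(rng));

    println!("hash: {}", block.hash());
    println!("proposed at: {}", block.timestamp());
    println!("protocol: {}", block.protocol_version());

    // `verify` recomputes the block and body hashes and checks them against the header.
    block
        .verify()
        .expect("test blocks should be internally consistent");
}
// --- end sketch ---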
- pub fn height(&self) -> u64 { - match self { - Block::V1(v1) => v1.header.height(), - Block::V2(v2) => v2.header.height(), - } - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - match self { - Block::V1(v1) => v1.era_id(), - Block::V2(v2) => v2.era_id(), - } - } - - /// Clones the era end, put it in the versioning enum, and returns it. - pub fn clone_era_end(&self) -> Option { - match self { - Block::V1(v1) => v1.header().era_end().cloned().map(EraEnd::V1), - Block::V2(v2) => v2.header().era_end().cloned().map(EraEnd::V2), - } - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - match self { - Block::V1(v1) => v1.header.is_switch_block(), - Block::V2(v2) => v2.header.is_switch_block(), - } - } - - /// Returns `true` if this block is the first block of the chain, the genesis block. - pub fn is_genesis(&self) -> bool { - match self { - Block::V1(v1) => v1.header.is_genesis(), - Block::V2(v2) => v2.header.is_genesis(), - } - } - - /// Returns the root hash of global state after the deploys in this block have been executed. - pub fn state_root_hash(&self) -> &Digest { - match self { - Block::V1(v1) => v1.header.state_root_hash(), - Block::V2(v2) => v2.header.state_root_hash(), - } - } - - /// List of identifiers for finality signatures for a particular past block. - pub fn rewarded_signatures(&self) -> &RewardedSignatures { - match self { - Block::V1(_v1) => &rewarded_signatures::EMPTY, - Block::V2(v2) => v2.body.rewarded_signatures(), - } - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK - } -} - -impl Display for Block { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "executed block #{}, {}, timestamp {}, {}, parent {}, post-state hash {}, body hash \ - {}, random bit {}, protocol version: {}", - self.height(), - self.hash(), - self.timestamp(), - self.era_id(), - self.parent_hash().inner(), - self.state_root_hash(), - self.body_hash(), - self.random_bit(), - self.protocol_version() - )?; - if let Some(era_end) = self.clone_era_end() { - write!(formatter, ", era_end: {}", era_end)?; - } - Ok(()) - } -} - -impl ToBytes for Block { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - Block::V1(v1) => { - buffer.insert(0, BLOCK_V1_TAG); - buffer.extend(v1.to_bytes()?); - } - Block::V2(v2) => { - buffer.insert(0, BLOCK_V2_TAG); - buffer.extend(v2.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - Block::V1(v1) => v1.serialized_length(), - Block::V2(v2) => v2.serialized_length(), - } - } -} - -impl FromBytes for Block { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - BLOCK_V1_TAG => { - let (body, remainder): (BlockV1, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V1(body), remainder)) - } - BLOCK_V2_TAG => { - let (body, remainder): (BlockV2, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V2(body), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl From<&BlockV2> for Block { - fn from(block: &BlockV2) -> Self { - Block::V2(block.clone()) - } -} - -impl From for Block { - fn from(block: BlockV2) -> Self { - Block::V2(block) - } -} - -impl From<&BlockV1> for Block 
{ - fn from(block: &BlockV1) -> Self { - Block::V1(block.clone()) - } -} - -impl From for Block { - fn from(block: BlockV1) -> Self { - Block::V1(block) - } -} - -#[cfg(all(feature = "std", feature = "json-schema"))] -impl From for Block { - fn from(block_with_signatures: JsonBlockWithSignatures) -> Self { - block_with_signatures.block - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, testing::TestRng}; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let block_v1 = TestBlockV1Builder::new().build(rng); - let block = Block::V1(block_v1); - bytesrepr::test_serialization_roundtrip(&block); - - let block_v2 = TestBlockBuilder::new().build(rng); - let block = Block::V2(block_v2); - bytesrepr::test_serialization_roundtrip(&block); - } -} diff --git a/casper_types_ver_2_0/src/block/available_block_range.rs b/casper_types_ver_2_0/src/block/available_block_range.rs deleted file mode 100644 index 99c2fe32..00000000 --- a/casper_types_ver_2_0/src/block/available_block_range.rs +++ /dev/null @@ -1,110 +0,0 @@ -use core::fmt::{self, Display, Formatter}; - -use alloc::vec::Vec; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -/// An unbroken, inclusive range of blocks. -#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct AvailableBlockRange { - /// The inclusive lower bound of the range. - low: u64, - /// The inclusive upper bound of the range. - high: u64, -} - -impl AvailableBlockRange { - /// An `AvailableRange` of [0, 0]. - pub const RANGE_0_0: AvailableBlockRange = AvailableBlockRange { low: 0, high: 0 }; - - /// Constructs a new `AvailableBlockRange` with the given limits. - pub fn new(low: u64, high: u64) -> Self { - assert!( - low <= high, - "cannot construct available block range with low > high" - ); - AvailableBlockRange { low, high } - } - - /// Returns `true` if `height` is within the range. - pub fn contains(&self, height: u64) -> bool { - height >= self.low && height <= self.high - } - - /// Returns the low value. - pub fn low(&self) -> u64 { - self.low - } - - /// Returns the high value. 
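// --- Hedged usage sketch (editor's addition, not part of the diff) ---
// Basic use of `AvailableBlockRange`, assuming it is re-exported from the crate root
// as `casper_types_ver_2_0::AvailableBlockRange`.
use casper_types_ver_2_0::AvailableBlockRange;

fn check_range() {
    // `new` panics if `low > high`, so the bounds below form a valid, inclusive range.
    let range = AvailableBlockRange::new(100, 200);
    assert!(range.contains(100) && range.contains(200));
    assert!(!range.contains(99));
    assert_eq!(range.low(), 100);

    // A node that has stored no blocks yet reports the degenerate [0, 0] range.
    assert!(AvailableBlockRange::RANGE_0_0.contains(0));
}
// --- end sketch ---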
- pub fn high(&self) -> u64 { - self.high - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - let low = rng.gen::() as u64; - let high = low + rng.gen::() as u64; - Self { low, high } - } -} - -impl Display for AvailableBlockRange { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "available block range [{}, {}]", - self.low, self.high - ) - } -} - -impl ToBytes for AvailableBlockRange { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.low.write_bytes(writer)?; - self.high.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.low.serialized_length() + self.high.serialized_length() - } -} - -impl FromBytes for AvailableBlockRange { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (low, remainder) = u64::from_bytes(bytes)?; - let (high, remainder) = u64::from_bytes(remainder)?; - Ok((AvailableBlockRange { low, high }, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = AvailableBlockRange::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/block/block_body.rs b/casper_types_ver_2_0/src/block/block_body.rs deleted file mode 100644 index 5fa8f574..00000000 --- a/casper_types_ver_2_0/src/block/block_body.rs +++ /dev/null @@ -1,115 +0,0 @@ -mod block_body_v1; -mod block_body_v2; - -pub use block_body_v1::BlockBodyV1; -pub use block_body_v2::BlockBodyV2; - -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for block body v1. -pub const BLOCK_BODY_V1_TAG: u8 = 0; -/// Tag for block body v2. -pub const BLOCK_BODY_V2_TAG: u8 = 1; - -/// The versioned body portion of a block. It encapsulates different variants of the BlockBody -/// struct. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(any(feature = "testing", test), derive(PartialEq))] -#[derive(Clone, Serialize, Deserialize, Debug)] -pub enum BlockBody { - /// The legacy, initial version of the body portion of a block. - #[serde(rename = "Version1")] - V1(BlockBodyV1), - /// The version 2 of the body portion of a block, which includes the - /// `past_finality_signatures`. 
- #[serde(rename = "Version2")] - V2(BlockBodyV2), -} - -impl Display for BlockBody { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - BlockBody::V1(v1) => Display::fmt(&v1, formatter), - BlockBody::V2(v2) => Display::fmt(&v2, formatter), - } - } -} - -impl From for BlockBody { - fn from(body: BlockBodyV1) -> Self { - BlockBody::V1(body) - } -} - -impl From<&BlockBodyV2> for BlockBody { - fn from(body: &BlockBodyV2) -> Self { - BlockBody::V2(body.clone()) - } -} - -impl ToBytes for BlockBody { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - BlockBody::V1(v1) => { - buffer.insert(0, BLOCK_BODY_V1_TAG); - buffer.extend(v1.to_bytes()?); - } - BlockBody::V2(v2) => { - buffer.insert(0, BLOCK_BODY_V2_TAG); - buffer.extend(v2.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - BlockBody::V1(v1) => v1.serialized_length(), - BlockBody::V2(v2) => v2.serialized_length(), - } - } -} - -impl FromBytes for BlockBody { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - BLOCK_BODY_V1_TAG => { - let (body, remainder): (BlockBodyV1, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V1(body), remainder)) - } - BLOCK_BODY_V2_TAG => { - let (body, remainder): (BlockBodyV2, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V2(body), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, testing::TestRng, TestBlockBuilder, TestBlockV1Builder}; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let block_body_v1 = TestBlockV1Builder::new().build_versioned(rng).clone_body(); - bytesrepr::test_serialization_roundtrip(&block_body_v1); - - let block_body_v2 = TestBlockBuilder::new().build_versioned(rng).clone_body(); - bytesrepr::test_serialization_roundtrip(&block_body_v2); - } -} diff --git a/casper_types_ver_2_0/src/block/block_body/block_body_v1.rs b/casper_types_ver_2_0/src/block/block_body/block_body_v1.rs deleted file mode 100644 index e32ab4b9..00000000 --- a/casper_types_ver_2_0/src/block/block_body/block_body_v1.rs +++ /dev/null @@ -1,160 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - DeployHash, Digest, PublicKey, -}; - -/// The body portion of a block. Version 1. -#[derive(Clone, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockBodyV1 { - /// The public key of the validator which proposed the block. - pub(super) proposer: PublicKey, - /// The deploy hashes of the non-transfer deploys within the block. - pub(super) deploy_hashes: Vec, - /// The deploy hashes of the transfers within the block. - pub(super) transfer_hashes: Vec, - #[serde(skip)] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - pub(super) hash: OnceCell, -} - -impl BlockBodyV1 { - /// Constructs a new `BlockBody`. 
- pub(crate) fn new( - proposer: PublicKey, - deploy_hashes: Vec, - transfer_hashes: Vec, - ) -> Self { - BlockBodyV1 { - proposer, - deploy_hashes, - transfer_hashes, - #[cfg(any(feature = "once_cell", test))] - hash: OnceCell::new(), - } - } - - /// Returns the public key of the validator which proposed the block. - pub fn proposer(&self) -> &PublicKey { - &self.proposer - } - - /// Returns the deploy hashes of the non-transfer deploys within the block. - pub fn deploy_hashes(&self) -> &[DeployHash] { - &self.deploy_hashes - } - - /// Returns the deploy hashes of the transfers within the block. - pub fn transfer_hashes(&self) -> &[DeployHash] { - &self.transfer_hashes - } - - /// Returns the deploy and transfer hashes in the order in which they were executed. - pub fn deploy_and_transfer_hashes(&self) -> impl Iterator { - self.deploy_hashes() - .iter() - .chain(self.transfer_hashes().iter()) - } - - /// Returns the body hash, i.e. the hash of the body's serialized bytes. - pub fn hash(&self) -> Digest { - #[cfg(any(feature = "once_cell", test))] - return *self.hash.get_or_init(|| self.compute_hash()); - - #[cfg(not(any(feature = "once_cell", test)))] - self.compute_hash() - } - - fn compute_hash(&self) -> Digest { - let serialized_body = self - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block body: {}", error)); - Digest::hash(serialized_body) - } -} - -impl PartialEq for BlockBodyV1 { - fn eq(&self, other: &BlockBodyV1) -> bool { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let BlockBodyV1 { - proposer, - deploy_hashes, - transfer_hashes, - hash: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let BlockBodyV1 { - proposer, - deploy_hashes, - transfer_hashes, - } = self; - *proposer == other.proposer - && *deploy_hashes == other.deploy_hashes - && *transfer_hashes == other.transfer_hashes - } -} - -impl Display for BlockBodyV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "block body proposed by {}, {} deploys, {} transfers", - self.proposer, - self.deploy_hashes.len(), - self.transfer_hashes.len() - ) - } -} - -impl ToBytes for BlockBodyV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.proposer.write_bytes(writer)?; - self.deploy_hashes.write_bytes(writer)?; - self.transfer_hashes.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.proposer.serialized_length() - + self.deploy_hashes.serialized_length() - + self.transfer_hashes.serialized_length() - } -} - -impl FromBytes for BlockBodyV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (proposer, bytes) = PublicKey::from_bytes(bytes)?; - let (deploy_hashes, bytes) = Vec::::from_bytes(bytes)?; - let (transfer_hashes, bytes) = Vec::::from_bytes(bytes)?; - let body = BlockBodyV1 { - proposer, - deploy_hashes, - transfer_hashes, - #[cfg(any(feature = "once_cell", test))] - hash: OnceCell::new(), - }; - Ok((body, bytes)) - } -} diff --git a/casper_types_ver_2_0/src/block/block_body/block_body_v2.rs b/casper_types_ver_2_0/src/block/block_body/block_body_v2.rs deleted file mode 100644 index a417f022..00000000 --- a/casper_types_ver_2_0/src/block/block_body/block_body_v2.rs +++ /dev/null @@ -1,214 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, 
Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - block::RewardedSignatures, - bytesrepr::{self, FromBytes, ToBytes}, - Digest, PublicKey, TransactionHash, -}; - -/// The body portion of a block. Version 2. -#[derive(Clone, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockBodyV2 { - /// The public key of the validator which proposed the block. - pub(super) proposer: PublicKey, - /// The hashes of the transfer transactions within the block. - pub(super) transfer: Vec, - /// The hashes of the non-transfer, native transactions within the block. - pub(super) staking: Vec, - /// The hashes of the installer/upgrader transactions within the block. - pub(super) install_upgrade: Vec, - /// The hashes of all other transactions within the block. - pub(super) standard: Vec, - /// List of identifiers for finality signatures for a particular past block. - pub(super) rewarded_signatures: RewardedSignatures, - #[serde(skip)] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - pub(super) hash: OnceCell, -} - -impl BlockBodyV2 { - /// Constructs a new `BlockBodyV2`. - pub(crate) fn new( - proposer: PublicKey, - transfer: Vec, - staking: Vec, - install_upgrade: Vec, - standard: Vec, - rewarded_signatures: RewardedSignatures, - ) -> Self { - BlockBodyV2 { - proposer, - transfer, - staking, - install_upgrade, - standard, - rewarded_signatures, - #[cfg(any(feature = "once_cell", test))] - hash: OnceCell::new(), - } - } - - /// Returns the public key of the validator which proposed the block. - pub fn proposer(&self) -> &PublicKey { - &self.proposer - } - - /// Returns the hashes of the transfer transactions within the block. - pub fn transfer(&self) -> impl Iterator { - self.transfer.iter() - } - - /// Returns the hashes of the non-transfer, native transactions within the block. - pub fn staking(&self) -> impl Iterator { - self.staking.iter() - } - - /// Returns the hashes of the installer/upgrader transactions within the block. - pub fn install_upgrade(&self) -> impl Iterator { - self.install_upgrade.iter() - } - - /// Returns the hashes of all other transactions within the block. - pub fn standard(&self) -> impl Iterator { - self.standard.iter() - } - - /// Returns all of the transaction hashes in the order in which they were executed. - pub fn all_transactions(&self) -> impl Iterator { - self.transfer() - .chain(self.staking()) - .chain(self.install_upgrade()) - .chain(self.standard()) - } - - /// Returns the body hash, i.e. the hash of the body's serialized bytes. - pub fn hash(&self) -> Digest { - #[cfg(any(feature = "once_cell", test))] - return *self.hash.get_or_init(|| self.compute_hash()); - - #[cfg(not(any(feature = "once_cell", test)))] - self.compute_hash() - } - - fn compute_hash(&self) -> Digest { - let serialized_body = self - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block body: {}", error)); - Digest::hash(serialized_body) - } - - /// Return the list of identifiers for finality signatures for a particular past block. 
- pub fn rewarded_signatures(&self) -> &RewardedSignatures { - &self.rewarded_signatures - } -} - -impl PartialEq for BlockBodyV2 { - fn eq(&self, other: &BlockBodyV2) -> bool { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let BlockBodyV2 { - proposer, - transfer, - staking, - install_upgrade, - standard, - rewarded_signatures, - hash: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let BlockBodyV2 { - proposer, - transfer, - staking, - install_upgrade, - standard, - rewarded_signatures, - } = self; - *proposer == other.proposer - && *transfer == other.transfer - && *staking == other.staking - && *install_upgrade == other.install_upgrade - && *standard == other.standard - && *rewarded_signatures == other.rewarded_signatures - } -} - -impl Display for BlockBodyV2 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "block body proposed by {}, {} transfers, {} non-transfer-native, {} \ - installer/upgraders, {} others", - self.proposer, - self.transfer.len(), - self.staking.len(), - self.install_upgrade.len(), - self.standard.len() - ) - } -} - -impl ToBytes for BlockBodyV2 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.proposer.write_bytes(writer)?; - self.transfer.write_bytes(writer)?; - self.staking.write_bytes(writer)?; - self.install_upgrade.write_bytes(writer)?; - self.standard.write_bytes(writer)?; - self.rewarded_signatures.write_bytes(writer)?; - Ok(()) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.proposer.serialized_length() - + self.transfer.serialized_length() - + self.staking.serialized_length() - + self.install_upgrade.serialized_length() - + self.standard.serialized_length() - + self.rewarded_signatures.serialized_length() - } -} - -impl FromBytes for BlockBodyV2 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (proposer, bytes) = PublicKey::from_bytes(bytes)?; - let (transfer, bytes) = Vec::::from_bytes(bytes)?; - let (staking, bytes) = Vec::::from_bytes(bytes)?; - let (install_upgrade, bytes) = Vec::::from_bytes(bytes)?; - let (standard, bytes) = Vec::::from_bytes(bytes)?; - let (rewarded_signatures, bytes) = RewardedSignatures::from_bytes(bytes)?; - let body = BlockBodyV2 { - proposer, - transfer, - staking, - install_upgrade, - standard, - rewarded_signatures, - #[cfg(any(feature = "once_cell", test))] - hash: OnceCell::new(), - }; - Ok((body, bytes)) - } -} diff --git a/casper_types_ver_2_0/src/block/block_hash.rs b/casper_types_ver_2_0/src/block/block_hash.rs deleted file mode 100644 index f6906c33..00000000 --- a/casper_types_ver_2_0/src/block/block_hash.rs +++ /dev/null @@ -1,131 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Block; -#[cfg(doc)] -use super::BlockV2; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, -}; - -#[cfg(feature = "json-schema")] -static BLOCK_HASH: Lazy = - Lazy::new(|| 
BlockHash::new(Digest::from([7; BlockHash::LENGTH]))); - -/// The cryptographic hash of a [`Block`]. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Hex-encoded cryptographic hash of a block.") -)] -#[serde(deny_unknown_fields)] -pub struct BlockHash(Digest); - -impl BlockHash { - /// The number of bytes in a `BlockHash` digest. - pub const LENGTH: usize = Digest::LENGTH; - - /// Constructs a new `BlockHash`. - pub fn new(hash: Digest) -> Self { - BlockHash(hash) - } - - /// Returns the wrapped inner digest. - pub fn inner(&self) -> &Digest { - &self.0 - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK_HASH - } - - /// Returns a new `DeployHash` directly initialized with the provided bytes; no hashing is done. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - BlockHash(Digest::from_raw(raw_digest)) - } - - /// Returns a random `DeployHash`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = rng.gen::<[u8; Self::LENGTH]>().into(); - BlockHash(hash) - } -} - -impl From for BlockHash { - fn from(digest: Digest) -> Self { - Self(digest) - } -} - -impl From for Digest { - fn from(block_hash: BlockHash) -> Self { - block_hash.0 - } -} - -impl Display for BlockHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "block-hash({})", self.0) - } -} - -impl AsRef<[u8]> for BlockHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for BlockHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for BlockHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Digest::from_bytes(bytes).map(|(inner, remainder)| (BlockHash(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let hash = BlockHash::random(rng); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/block/block_hash_and_height.rs b/casper_types_ver_2_0/src/block/block_hash_and_height.rs deleted file mode 100644 index b9a48796..00000000 --- a/casper_types_ver_2_0/src/block/block_hash_and_height.rs +++ /dev/null @@ -1,114 +0,0 @@ -use core::fmt::{self, Display, Formatter}; - -use alloc::vec::Vec; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::BlockHash; -#[cfg(doc)] -use super::BlockV2; -use crate::bytesrepr::{self, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -/// The block hash and height of a given block. 
-#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockHashAndHeight { - /// The hash of the block. - block_hash: BlockHash, - /// The height of the block. - block_height: u64, -} - -impl BlockHashAndHeight { - /// Constructs a new `BlockHashAndHeight`. - pub fn new(block_hash: BlockHash, block_height: u64) -> Self { - Self { - block_hash, - block_height, - } - } - - /// Returns the hash of the block. - pub fn block_hash(&self) -> &BlockHash { - &self.block_hash - } - - /// Returns the height of the block. - pub fn block_height(&self) -> u64 { - self.block_height - } - - /// Returns a random `BlockHashAndHeight`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - Self { - block_hash: BlockHash::random(rng), - block_height: rng.gen(), - } - } -} - -impl Display for BlockHashAndHeight { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "{}, height {} ", - self.block_hash, self.block_height - ) - } -} - -impl ToBytes for BlockHashAndHeight { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.block_hash.write_bytes(writer)?; - self.block_height.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.block_hash.serialized_length() + self.block_height.serialized_length() - } -} - -impl FromBytes for BlockHashAndHeight { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (block_hash, remainder) = BlockHash::from_bytes(bytes)?; - let (block_height, remainder) = u64::from_bytes(remainder)?; - Ok(( - BlockHashAndHeight { - block_hash, - block_height, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BlockHashAndHeight::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/block/block_header.rs b/casper_types_ver_2_0/src/block/block_header.rs deleted file mode 100644 index 8c683a57..00000000 --- a/casper_types_ver_2_0/src/block/block_header.rs +++ /dev/null @@ -1,287 +0,0 @@ -mod block_header_v1; -mod block_header_v2; - -pub use block_header_v1::BlockHeaderV1; -pub use block_header_v2::BlockHeaderV2; - -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "std")] -use crate::ProtocolConfig; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - BlockHash, Digest, EraEnd, EraId, ProtocolVersion, PublicKey, Timestamp, U512, -}; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for block header v1. -pub const BLOCK_HEADER_V1_TAG: u8 = 0; -/// Tag for block header v2. -pub const BLOCK_HEADER_V2_TAG: u8 = 1; - -/// The versioned header portion of a block. It encapsulates different variants of the BlockHeader -/// struct. 
-#[derive(Clone, Debug, Eq, PartialEq)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum BlockHeader { - /// The legacy, initial version of the header portion of a block. - #[cfg_attr(any(feature = "std", test), serde(rename = "Version1"))] - V1(BlockHeaderV1), - /// The version 2 of the header portion of a block. - #[cfg_attr(any(feature = "std", test), serde(rename = "Version2"))] - V2(BlockHeaderV2), -} - -impl BlockHeader { - /// Returns the hash of this block header. - pub fn block_hash(&self) -> BlockHash { - match self { - BlockHeader::V1(v1) => v1.block_hash(), - BlockHeader::V2(v2) => v2.block_hash(), - } - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - match self { - BlockHeader::V1(v1) => v1.parent_hash(), - BlockHeader::V2(v2) => v2.parent_hash(), - } - } - - /// Returns the root hash of global state after the deploys in this block have been executed. - pub fn state_root_hash(&self) -> &Digest { - match self { - BlockHeader::V1(v1) => v1.state_root_hash(), - BlockHeader::V2(v2) => v2.state_root_hash(), - } - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - match self { - BlockHeader::V1(v1) => v1.body_hash(), - BlockHeader::V2(v2) => v2.body_hash(), - } - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - match self { - BlockHeader::V1(v1) => v1.random_bit(), - BlockHeader::V2(v2) => v2.random_bit(), - } - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - match self { - BlockHeader::V1(v1) => v1.accumulated_seed(), - BlockHeader::V2(v2) => v2.accumulated_seed(), - } - } - - /// Returns the `EraEnd` of a block if it is a switch block. - pub fn clone_era_end(&self) -> Option { - match self { - BlockHeader::V1(v1) => v1.era_end().map(|ee| ee.clone().into()), - BlockHeader::V2(v2) => v2.era_end().map(|ee| ee.clone().into()), - } - } - - /// Returns equivocators if the header is of a switch block. - pub fn maybe_equivocators(&self) -> Option<&[PublicKey]> { - match self { - BlockHeader::V1(v1) => v1.era_end().map(|ee| ee.equivocators()), - BlockHeader::V2(v2) => v2.era_end().map(|ee| ee.equivocators()), - } - } - - /// Returns equivocators if the header is of a switch block. - pub fn maybe_inactive_validators(&self) -> Option<&[PublicKey]> { - match self { - BlockHeader::V1(v1) => v1.era_end().map(|ee| ee.inactive_validators()), - BlockHeader::V2(v2) => v2.era_end().map(|ee| ee.inactive_validators()), - } - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - match self { - BlockHeader::V1(v1) => v1.timestamp(), - BlockHeader::V2(v2) => v2.timestamp(), - } - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - match self { - BlockHeader::V1(v1) => v1.era_id(), - BlockHeader::V2(v2) => v2.era_id(), - } - } - - /// Returns the era ID in which the next block would be created (i.e. this block's era ID, or - /// its successor if this is a switch block). - pub fn next_block_era_id(&self) -> EraId { - match self { - BlockHeader::V1(v1) => v1.next_block_era_id(), - BlockHeader::V2(v2) => v2.next_block_era_id(), - } - } - - /// Returns the height of this block, i.e. the number of ancestors. 
- pub fn height(&self) -> u64 { - match self { - BlockHeader::V1(v1) => v1.height(), - BlockHeader::V2(v2) => v2.height(), - } - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - match self { - BlockHeader::V1(v1) => v1.protocol_version(), - BlockHeader::V2(v2) => v2.protocol_version(), - } - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - match self { - BlockHeader::V1(v1) => v1.is_switch_block(), - BlockHeader::V2(v2) => v2.is_switch_block(), - } - } - - /// Returns the validators for the upcoming era and their respective weights (if this is a - /// switch block). - pub fn next_era_validator_weights(&self) -> Option<&BTreeMap> { - match self { - BlockHeader::V1(v1) => v1.next_era_validator_weights(), - BlockHeader::V2(v2) => v2.next_era_validator_weights(), - } - } - - /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. - pub fn is_genesis(&self) -> bool { - match self { - BlockHeader::V1(v1) => v1.is_genesis(), - BlockHeader::V2(v2) => v2.is_genesis(), - } - } - - /// Returns `true` if this block belongs to the last block before the upgrade to the - /// current protocol version. - #[cfg(feature = "std")] - pub fn is_last_block_before_activation(&self, protocol_config: &ProtocolConfig) -> bool { - match self { - BlockHeader::V1(v1) => v1.is_last_block_before_activation(protocol_config), - BlockHeader::V2(v2) => v2.is_last_block_before_activation(protocol_config), - } - } - - // This method is not intended to be used by third party crates. - // - // Sets the block hash without recomputing it. Must only be called with the correct hash. - #[doc(hidden)] - #[cfg(any(feature = "once_cell", test))] - pub fn set_block_hash(&self, block_hash: BlockHash) { - match self { - BlockHeader::V1(v1) => v1.set_block_hash(block_hash), - BlockHeader::V2(v2) => v2.set_block_hash(block_hash), - } - } -} - -impl Display for BlockHeader { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - BlockHeader::V1(v1) => Display::fmt(&v1, formatter), - BlockHeader::V2(v2) => Display::fmt(&v2, formatter), - } - } -} - -impl From for BlockHeader { - fn from(header: BlockHeaderV1) -> Self { - BlockHeader::V1(header) - } -} - -impl From for BlockHeader { - fn from(header: BlockHeaderV2) -> Self { - BlockHeader::V2(header) - } -} - -impl ToBytes for BlockHeader { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - BlockHeader::V1(v1) => { - buffer.insert(0, BLOCK_HEADER_V1_TAG); - buffer.extend(v1.to_bytes()?); - } - BlockHeader::V2(v2) => { - buffer.insert(0, BLOCK_HEADER_V2_TAG); - buffer.extend(v2.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - BlockHeader::V1(v1) => v1.serialized_length(), - BlockHeader::V2(v2) => v2.serialized_length(), - } - } -} - -impl FromBytes for BlockHeader { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - BLOCK_HEADER_V1_TAG => { - let (header, remainder): (BlockHeaderV1, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V1(header), remainder)) - } - BLOCK_HEADER_V2_TAG => { - let (header, remainder): (BlockHeaderV2, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V2(header), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - 
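
For orientation, the `ToBytes`/`FromBytes` impls for the versioned `BlockHeader` enum above all follow the same tag-prefixed scheme: write a single leading tag byte (`BLOCK_HEADER_V1_TAG` = 0, `BLOCK_HEADER_V2_TAG` = 1), then the selected variant's own encoding, and reject unknown tags with `bytesrepr::Error::Formatting`; the `BlockIdentifier` type further down uses the same idea with `HASH_TAG`/`HEIGHT_TAG`. Below is a minimal, self-contained sketch of that pattern only. `Header`, `DecodeError` and `take_u64` are illustrative stand-ins, and plain little-endian `u64` fields replace the crate's `bytesrepr` encoding, so this is not the casper-types API itself.

const V1_TAG: u8 = 0;
const V2_TAG: u8 = 1;

#[derive(Debug, PartialEq)]
enum Header {
    V1 { height: u64 },
    V2 { height: u64, gas_price: u8 },
}

#[derive(Debug, PartialEq)]
enum DecodeError {
    UnknownTag(u8),
    Truncated,
}

/// Reads a little-endian u64 and returns the unconsumed remainder.
fn take_u64(bytes: &[u8]) -> Result<(u64, &[u8]), DecodeError> {
    if bytes.len() < 8 {
        return Err(DecodeError::Truncated);
    }
    let (head, rest) = bytes.split_at(8);
    let mut buf = [0u8; 8];
    buf.copy_from_slice(head);
    Ok((u64::from_le_bytes(buf), rest))
}

impl Header {
    /// Encoding: one leading tag byte, then the selected variant's fields.
    fn to_bytes(&self) -> Vec<u8> {
        let mut out = Vec::new();
        match self {
            Header::V1 { height } => {
                out.push(V1_TAG);
                out.extend_from_slice(&height.to_le_bytes());
            }
            Header::V2 { height, gas_price } => {
                out.push(V2_TAG);
                out.extend_from_slice(&height.to_le_bytes());
                out.push(*gas_price);
            }
        }
        out
    }

    /// Decoding: read the tag first, dispatch to the matching variant, and
    /// hand back the remainder, mirroring the shape of `FromBytes::from_bytes`.
    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), DecodeError> {
        let (&tag, rest) = bytes.split_first().ok_or(DecodeError::Truncated)?;
        match tag {
            V1_TAG => {
                let (height, rest) = take_u64(rest)?;
                Ok((Header::V1 { height }, rest))
            }
            V2_TAG => {
                let (height, rest) = take_u64(rest)?;
                let (&gas_price, rest) = rest.split_first().ok_or(DecodeError::Truncated)?;
                Ok((Header::V2 { height, gas_price }, rest))
            }
            other => Err(DecodeError::UnknownTag(other)),
        }
    }
}

fn main() {
    let header = Header::V2 { height: 10, gas_price: 1 };
    let bytes = header.to_bytes();
    let (decoded, remainder) = Header::from_bytes(&bytes).expect("round trip");
    assert_eq!(decoded, header);
    assert!(remainder.is_empty());
}

A second recurring pattern in the deleted body and header types is the cached hash: each struct carries a `OnceCell` field that serde skips, `DataSize` skips, and `PartialEq` deliberately ignores by destructuring, so equality depends only on real block data while repeated `hash()`/`block_hash()` calls stay cheap. A rough standalone equivalent is sketched here with `std::sync::OnceLock` and a toy 64-bit hash standing in for `Digest::hash`; the names `Body` and `toy_hash` are hypothetical, and the real crate keeps the cell behind the optional `once_cell` feature.

use std::sync::OnceLock;

/// Toy FNV-style 64-bit hash standing in for `Digest::hash`; not the real digest.
fn toy_hash(bytes: &[u8]) -> u64 {
    bytes.iter().fold(0xcbf2_9ce4_8422_2325_u64, |acc, b| {
        (acc ^ u64::from(*b)).wrapping_mul(0x100000001b3)
    })
}

#[derive(Debug)]
struct Body {
    payload: Vec<u8>,
    // Filled in at most once, on first request; never serialized or compared.
    hash: OnceLock<u64>,
}

impl Body {
    fn new(payload: Vec<u8>) -> Self {
        Body { payload, hash: OnceLock::new() }
    }

    /// Mirrors `BlockBodyV2::hash`: return the cached digest, computing it lazily.
    fn hash(&self) -> u64 {
        *self.hash.get_or_init(|| toy_hash(&self.payload))
    }
}

impl PartialEq for Body {
    fn eq(&self, other: &Self) -> bool {
        // Only the real data participates in equality; the cache is ignored,
        // just as the deleted types destructure themselves and skip `hash`.
        self.payload == other.payload
    }
}

fn main() {
    let body = Body::new(b"transactions...".to_vec());
    assert_eq!(body.hash(), body.hash()); // the second call reuses the cached value
    assert_eq!(body, Body::new(b"transactions...".to_vec()));
}

Caching this way leaves the wire format and equality semantics untouched, which is consistent with the cell being feature-gated and skipped by every derive in the deleted code.
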
-#[cfg(test)] -mod tests { - use crate::{bytesrepr, testing::TestRng, TestBlockBuilder, TestBlockV1Builder}; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let block_header_v1 = TestBlockV1Builder::new() - .build_versioned(rng) - .clone_header(); - bytesrepr::test_serialization_roundtrip(&block_header_v1); - - let block_header_v2 = TestBlockBuilder::new().build_versioned(rng).clone_header(); - bytesrepr::test_serialization_roundtrip(&block_header_v2); - } -} diff --git a/casper_types_ver_2_0/src/block/block_header/block_header_v1.rs b/casper_types_ver_2_0/src/block/block_header/block_header_v1.rs deleted file mode 100644 index 7fb64818..00000000 --- a/casper_types_ver_2_0/src/block/block_header/block_header_v1.rs +++ /dev/null @@ -1,372 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use crate::{ - block::{BlockHash, EraEndV1}, - bytesrepr::{self, FromBytes, ToBytes}, - Digest, EraId, ProtocolVersion, PublicKey, Timestamp, U512, -}; -#[cfg(feature = "std")] -use crate::{ActivationPoint, ProtocolConfig}; - -#[cfg(feature = "json-schema")] -static BLOCK_HEADER_V1: Lazy = Lazy::new(|| { - let parent_hash = BlockHash::new(Digest::from([7; Digest::LENGTH])); - let state_root_hash = Digest::from([8; Digest::LENGTH]); - let random_bit = true; - let era_end = Some(EraEndV1::example().clone()); - let timestamp = *Timestamp::example(); - let era_id = EraId::from(1); - let height: u64 = 10; - let protocol_version = ProtocolVersion::V1_0_0; - let accumulated_seed = Digest::hash_pair(Digest::from([9; Digest::LENGTH]), [random_bit as u8]); - let body_hash = Digest::from([5; Digest::LENGTH]); - BlockHeaderV1::new( - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - OnceCell::new(), - ) -}); - -/// The header portion of a block. -#[derive(Clone, Debug, Eq)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockHeaderV1 { - /// The parent block's hash. - pub(super) parent_hash: BlockHash, - /// The root hash of global state after the deploys in this block have been executed. - pub(super) state_root_hash: Digest, - /// The hash of the block's body. - pub(super) body_hash: Digest, - /// A random bit needed for initializing a future era. - pub(super) random_bit: bool, - /// A seed needed for initializing a future era. - pub(super) accumulated_seed: Digest, - /// The `EraEnd` of a block if it is a switch block. - pub(super) era_end: Option, - /// The timestamp from when the block was proposed. - pub(super) timestamp: Timestamp, - /// The era ID in which this block was created. - pub(super) era_id: EraId, - /// The height of this block, i.e. the number of ancestors. - pub(super) height: u64, - /// The protocol version of the network from when this block was created. 
- pub(super) protocol_version: ProtocolVersion, - #[cfg_attr(any(all(feature = "std", feature = "once_cell"), test), serde(skip))] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - pub(super) block_hash: OnceCell, -} - -impl BlockHeaderV1 { - /// Returns the hash of this block header. - pub fn block_hash(&self) -> BlockHash { - #[cfg(any(feature = "once_cell", test))] - return *self.block_hash.get_or_init(|| self.compute_block_hash()); - - #[cfg(not(any(feature = "once_cell", test)))] - self.compute_block_hash() - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - &self.parent_hash - } - - /// Returns the root hash of global state after the deploys in this block have been executed. - pub fn state_root_hash(&self) -> &Digest { - &self.state_root_hash - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - &self.body_hash - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - self.random_bit - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - &self.accumulated_seed - } - - /// Returns the `EraEnd` of a block if it is a switch block. - pub fn era_end(&self) -> Option<&EraEndV1> { - self.era_end.as_ref() - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - self.timestamp - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Returns the era ID in which the next block would be created (i.e. this block's era ID, or - /// its successor if this is a switch block). - pub fn next_block_era_id(&self) -> EraId { - if self.era_end.is_some() { - self.era_id.successor() - } else { - self.era_id - } - } - - /// Returns the height of this block, i.e. the number of ancestors. - pub fn height(&self) -> u64 { - self.height - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - self.era_end.is_some() - } - - /// Returns the validators for the upcoming era and their respective weights (if this is a - /// switch block). - pub fn next_era_validator_weights(&self) -> Option<&BTreeMap> { - self.era_end - .as_ref() - .map(|era_end| era_end.next_era_validator_weights()) - } - - /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. - pub fn is_genesis(&self) -> bool { - self.era_id().is_genesis() && self.height() == 0 - } - - /// Returns `true` if this block belongs to the last block before the upgrade to the - /// current protocol version. - #[cfg(feature = "std")] - pub fn is_last_block_before_activation(&self, protocol_config: &ProtocolConfig) -> bool { - protocol_config.version > self.protocol_version - && self.is_switch_block() - && ActivationPoint::EraId(self.next_block_era_id()) == protocol_config.activation_point - } - - pub(crate) fn compute_block_hash(&self) -> BlockHash { - let serialized_header = self - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block header: {}", error)); - BlockHash::new(Digest::hash(serialized_header)) - } - - // This method is not intended to be used by third party crates. 
- #[doc(hidden)] - #[allow(clippy::too_many_arguments)] - pub fn new( - parent_hash: BlockHash, - state_root_hash: Digest, - body_hash: Digest, - random_bit: bool, - accumulated_seed: Digest, - era_end: Option, - timestamp: Timestamp, - era_id: EraId, - height: u64, - protocol_version: ProtocolVersion, - #[cfg(any(feature = "once_cell", test))] block_hash: OnceCell, - ) -> Self { - BlockHeaderV1 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - block_hash, - } - } - - // This method is not intended to be used by third party crates. - // - // Sets the block hash without recomputing it. Must only be called with the correct hash. - #[doc(hidden)] - #[cfg(any(feature = "once_cell", test))] - pub fn set_block_hash(&self, block_hash: BlockHash) { - self.block_hash.get_or_init(|| block_hash); - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK_HEADER_V1 - } - - #[cfg(test)] - pub(crate) fn set_body_hash(&mut self, new_body_hash: Digest) { - self.body_hash = new_body_hash; - } -} - -impl PartialEq for BlockHeaderV1 { - fn eq(&self, other: &BlockHeaderV1) -> bool { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let BlockHeaderV1 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - block_hash: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let BlockHeaderV1 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - } = self; - *parent_hash == other.parent_hash - && *state_root_hash == other.state_root_hash - && *body_hash == other.body_hash - && *random_bit == other.random_bit - && *accumulated_seed == other.accumulated_seed - && *era_end == other.era_end - && *timestamp == other.timestamp - && *era_id == other.era_id - && *height == other.height - && *protocol_version == other.protocol_version - } -} - -impl Display for BlockHeaderV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "block header #{}, {}, timestamp {}, {}, parent {}, post-state hash {}, body hash {}, \ - random bit {}, protocol version: {}", - self.height, - self.block_hash(), - self.timestamp, - self.era_id, - self.parent_hash.inner(), - self.state_root_hash, - self.body_hash, - self.random_bit, - self.protocol_version, - )?; - if let Some(era_end) = &self.era_end { - write!(formatter, ", era_end: {}", era_end)?; - } - Ok(()) - } -} - -impl ToBytes for BlockHeaderV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.parent_hash.write_bytes(writer)?; - self.state_root_hash.write_bytes(writer)?; - self.body_hash.write_bytes(writer)?; - self.random_bit.write_bytes(writer)?; - self.accumulated_seed.write_bytes(writer)?; - self.era_end.write_bytes(writer)?; - self.timestamp.write_bytes(writer)?; - self.era_id.write_bytes(writer)?; - self.height.write_bytes(writer)?; - self.protocol_version.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - 
self.parent_hash.serialized_length() - + self.state_root_hash.serialized_length() - + self.body_hash.serialized_length() - + self.random_bit.serialized_length() - + self.accumulated_seed.serialized_length() - + self.era_end.serialized_length() - + self.timestamp.serialized_length() - + self.era_id.serialized_length() - + self.height.serialized_length() - + self.protocol_version.serialized_length() - } -} - -impl FromBytes for BlockHeaderV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (parent_hash, remainder) = BlockHash::from_bytes(bytes)?; - let (state_root_hash, remainder) = Digest::from_bytes(remainder)?; - let (body_hash, remainder) = Digest::from_bytes(remainder)?; - let (random_bit, remainder) = bool::from_bytes(remainder)?; - let (accumulated_seed, remainder) = Digest::from_bytes(remainder)?; - let (era_end, remainder) = Option::from_bytes(remainder)?; - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - let (era_id, remainder) = EraId::from_bytes(remainder)?; - let (height, remainder) = u64::from_bytes(remainder)?; - let (protocol_version, remainder) = ProtocolVersion::from_bytes(remainder)?; - let block_header = BlockHeaderV1 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - block_hash: OnceCell::new(), - }; - Ok((block_header, remainder)) - } -} diff --git a/casper_types_ver_2_0/src/block/block_header/block_header_v2.rs b/casper_types_ver_2_0/src/block/block_header/block_header_v2.rs deleted file mode 100644 index 14d11bac..00000000 --- a/casper_types_ver_2_0/src/block/block_header/block_header_v2.rs +++ /dev/null @@ -1,371 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - BlockHash, Digest, EraEndV2, EraId, ProtocolVersion, PublicKey, Timestamp, U512, -}; -#[cfg(feature = "std")] -use crate::{ActivationPoint, ProtocolConfig}; - -#[cfg(feature = "json-schema")] -static BLOCK_HEADER_V2: Lazy = Lazy::new(|| { - let parent_hash = BlockHash::new(Digest::from([7; Digest::LENGTH])); - let state_root_hash = Digest::from([8; Digest::LENGTH]); - let random_bit = true; - let era_end = Some(EraEndV2::example().clone()); - let timestamp = *Timestamp::example(); - let era_id = EraId::from(1); - let height: u64 = 10; - let protocol_version = ProtocolVersion::V1_0_0; - let accumulated_seed = Digest::hash_pair(Digest::from([9; Digest::LENGTH]), [random_bit as u8]); - let body_hash = Digest::from([5; Digest::LENGTH]); - BlockHeaderV2::new( - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - OnceCell::new(), - ) -}); - -/// The header portion of a block. -#[derive(Clone, Debug, Eq)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockHeaderV2 { - /// The parent block's hash. 
- pub(super) parent_hash: BlockHash, - /// The root hash of global state after the deploys in this block have been executed. - pub(super) state_root_hash: Digest, - /// The hash of the block's body. - pub(super) body_hash: Digest, - /// A random bit needed for initializing a future era. - pub(super) random_bit: bool, - /// A seed needed for initializing a future era. - pub(super) accumulated_seed: Digest, - /// The `EraEnd` of a block if it is a switch block. - pub(super) era_end: Option, - /// The timestamp from when the block was proposed. - pub(super) timestamp: Timestamp, - /// The era ID in which this block was created. - pub(super) era_id: EraId, - /// The height of this block, i.e. the number of ancestors. - pub(super) height: u64, - /// The protocol version of the network from when this block was created. - pub(super) protocol_version: ProtocolVersion, - #[cfg_attr(any(all(feature = "std", feature = "once_cell"), test), serde(skip))] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - pub(super) block_hash: OnceCell, -} - -impl BlockHeaderV2 { - /// Returns the hash of this block header. - pub fn block_hash(&self) -> BlockHash { - #[cfg(any(feature = "once_cell", test))] - return *self.block_hash.get_or_init(|| self.compute_block_hash()); - - #[cfg(not(any(feature = "once_cell", test)))] - self.compute_block_hash() - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - &self.parent_hash - } - - /// Returns the root hash of global state after the deploys in this block have been executed. - pub fn state_root_hash(&self) -> &Digest { - &self.state_root_hash - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - &self.body_hash - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - self.random_bit - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - &self.accumulated_seed - } - - /// Returns the `EraEnd` of a block if it is a switch block. - pub fn era_end(&self) -> Option<&EraEndV2> { - self.era_end.as_ref() - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - self.timestamp - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Returns the era ID in which the next block would be created (i.e. this block's era ID, or - /// its successor if this is a switch block). - pub fn next_block_era_id(&self) -> EraId { - if self.era_end.is_some() { - self.era_id.successor() - } else { - self.era_id - } - } - - /// Returns the height of this block, i.e. the number of ancestors. - pub fn height(&self) -> u64 { - self.height - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - self.era_end.is_some() - } - - /// Returns the validators for the upcoming era and their respective weights (if this is a - /// switch block). - pub fn next_era_validator_weights(&self) -> Option<&BTreeMap> { - self.era_end - .as_ref() - .map(|era_end| era_end.next_era_validator_weights()) - } - - /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. 
- pub fn is_genesis(&self) -> bool { - self.era_id().is_genesis() && self.height() == 0 - } - - /// Returns `true` if this block belongs to the last block before the upgrade to the - /// current protocol version. - #[cfg(feature = "std")] - pub fn is_last_block_before_activation(&self, protocol_config: &ProtocolConfig) -> bool { - protocol_config.version > self.protocol_version - && self.is_switch_block() - && ActivationPoint::EraId(self.next_block_era_id()) == protocol_config.activation_point - } - - pub(crate) fn compute_block_hash(&self) -> BlockHash { - let serialized_header = self - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block header: {}", error)); - BlockHash::new(Digest::hash(serialized_header)) - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[allow(clippy::too_many_arguments)] - pub fn new( - parent_hash: BlockHash, - state_root_hash: Digest, - body_hash: Digest, - random_bit: bool, - accumulated_seed: Digest, - era_end: Option, - timestamp: Timestamp, - era_id: EraId, - height: u64, - protocol_version: ProtocolVersion, - #[cfg(any(feature = "once_cell", test))] block_hash: OnceCell, - ) -> Self { - BlockHeaderV2 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - block_hash, - } - } - - // This method is not intended to be used by third party crates. - // - // Sets the block hash without recomputing it. Must only be called with the correct hash. - #[doc(hidden)] - #[cfg(any(feature = "once_cell", test))] - pub fn set_block_hash(&self, block_hash: BlockHash) { - self.block_hash.get_or_init(|| block_hash); - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK_HEADER_V2 - } - - #[cfg(test)] - pub(crate) fn set_body_hash(&mut self, new_body_hash: Digest) { - self.body_hash = new_body_hash; - } -} - -impl PartialEq for BlockHeaderV2 { - fn eq(&self, other: &BlockHeaderV2) -> bool { - // Destructure to make sure we don't accidentally omit fields. 
- #[cfg(any(feature = "once_cell", test))] - let BlockHeaderV2 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - block_hash: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let BlockHeaderV2 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - } = self; - *parent_hash == other.parent_hash - && *state_root_hash == other.state_root_hash - && *body_hash == other.body_hash - && *random_bit == other.random_bit - && *accumulated_seed == other.accumulated_seed - && *era_end == other.era_end - && *timestamp == other.timestamp - && *era_id == other.era_id - && *height == other.height - && *protocol_version == other.protocol_version - } -} - -impl Display for BlockHeaderV2 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "block header #{}, {}, timestamp {}, {}, parent {}, post-state hash {}, body hash {}, \ - random bit {}, protocol version: {}", - self.height, - self.block_hash(), - self.timestamp, - self.era_id, - self.parent_hash.inner(), - self.state_root_hash, - self.body_hash, - self.random_bit, - self.protocol_version, - )?; - if let Some(era_end) = &self.era_end { - write!(formatter, ", era_end: {}", era_end)?; - } - Ok(()) - } -} - -impl ToBytes for BlockHeaderV2 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.parent_hash.write_bytes(writer)?; - self.state_root_hash.write_bytes(writer)?; - self.body_hash.write_bytes(writer)?; - self.random_bit.write_bytes(writer)?; - self.accumulated_seed.write_bytes(writer)?; - self.era_end.write_bytes(writer)?; - self.timestamp.write_bytes(writer)?; - self.era_id.write_bytes(writer)?; - self.height.write_bytes(writer)?; - self.protocol_version.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.parent_hash.serialized_length() - + self.state_root_hash.serialized_length() - + self.body_hash.serialized_length() - + self.random_bit.serialized_length() - + self.accumulated_seed.serialized_length() - + self.era_end.serialized_length() - + self.timestamp.serialized_length() - + self.era_id.serialized_length() - + self.height.serialized_length() - + self.protocol_version.serialized_length() - } -} - -impl FromBytes for BlockHeaderV2 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (parent_hash, remainder) = BlockHash::from_bytes(bytes)?; - let (state_root_hash, remainder) = Digest::from_bytes(remainder)?; - let (body_hash, remainder) = Digest::from_bytes(remainder)?; - let (random_bit, remainder) = bool::from_bytes(remainder)?; - let (accumulated_seed, remainder) = Digest::from_bytes(remainder)?; - let (era_end, remainder) = Option::from_bytes(remainder)?; - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - let (era_id, remainder) = EraId::from_bytes(remainder)?; - let (height, remainder) = u64::from_bytes(remainder)?; - let (protocol_version, remainder) = ProtocolVersion::from_bytes(remainder)?; - let block_header = BlockHeaderV2 { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - block_hash: OnceCell::new(), - }; - 
Ok((block_header, remainder)) - } -} diff --git a/casper_types_ver_2_0/src/block/block_identifier.rs b/casper_types_ver_2_0/src/block/block_identifier.rs deleted file mode 100644 index 02508bdd..00000000 --- a/casper_types_ver_2_0/src/block/block_identifier.rs +++ /dev/null @@ -1,138 +0,0 @@ -use alloc::vec::Vec; -use core::num::ParseIntError; -#[cfg(test)] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - BlockHash, Digest, DigestError, -}; - -const HASH_TAG: u8 = 0; -const HEIGHT_TAG: u8 = 1; - -/// Identifier for possible ways to retrieve a block. -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum BlockIdentifier { - /// Identify and retrieve the block with its hash. - Hash(BlockHash), - /// Identify and retrieve the block with its height. - Height(u64), -} - -impl BlockIdentifier { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..1) { - 0 => Self::Hash(BlockHash::random(rng)), - 1 => Self::Height(rng.gen()), - _ => panic!(), - } - } -} - -impl FromBytes for BlockIdentifier { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - match bytes.split_first() { - Some((&HASH_TAG, rem)) => { - let (hash, rem) = FromBytes::from_bytes(rem)?; - Ok((BlockIdentifier::Hash(hash), rem)) - } - Some((&HEIGHT_TAG, rem)) => { - let (height, rem) = FromBytes::from_bytes(rem)?; - Ok((BlockIdentifier::Height(height), rem)) - } - Some(_) | None => Err(bytesrepr::Error::Formatting), - } - } -} - -impl ToBytes for BlockIdentifier { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - BlockIdentifier::Hash(hash) => { - writer.push(HASH_TAG); - hash.write_bytes(writer)?; - } - BlockIdentifier::Height(height) => { - writer.push(HEIGHT_TAG); - height.write_bytes(writer)?; - } - } - Ok(()) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - BlockIdentifier::Hash(hash) => hash.serialized_length(), - BlockIdentifier::Height(height) => height.serialized_length(), - } - } -} - -impl core::str::FromStr for BlockIdentifier { - type Err = ParseBlockIdentifierError; - - fn from_str(maybe_block_identifier: &str) -> Result { - if maybe_block_identifier.is_empty() { - return Err(ParseBlockIdentifierError::EmptyString); - } - - if maybe_block_identifier.len() == (Digest::LENGTH * 2) { - let hash = Digest::from_hex(maybe_block_identifier) - .map_err(ParseBlockIdentifierError::FromHexError)?; - Ok(BlockIdentifier::Hash(BlockHash::new(hash))) - } else { - let height = maybe_block_identifier - .parse() - .map_err(ParseBlockIdentifierError::ParseIntError)?; - Ok(BlockIdentifier::Height(height)) - } - } -} - -/// Represents errors that can arise when parsing a [`BlockIdentifier`]. -#[derive(Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum ParseBlockIdentifierError { - /// String was empty. - #[cfg_attr( - feature = "std", - error("Empty string is not a valid block identifier.") - )] - EmptyString, - /// Couldn't parse a height value. - #[cfg_attr(feature = "std", error("Unable to parse height from string. 
{0}"))] - ParseIntError(ParseIntError), - /// Couldn't parse a blake2bhash. - #[cfg_attr(feature = "std", error("Unable to parse digest from string. {0}"))] - FromHexError(DigestError), -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BlockIdentifier::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/block/block_signatures.rs b/casper_types_ver_2_0/src/block/block_signatures.rs deleted file mode 100644 index 63060652..00000000 --- a/casper_types_ver_2_0/src/block/block_signatures.rs +++ /dev/null @@ -1,248 +0,0 @@ -use alloc::collections::BTreeMap; -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use super::{BlockHash, FinalitySignature}; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - crypto, EraId, PublicKey, Signature, -}; - -/// An error returned during an attempt to merge two incompatible [`BlockSignatures`]. -#[derive(Copy, Clone, Eq, PartialEq, Debug)] -#[non_exhaustive] -pub enum BlockSignaturesMergeError { - /// A mismatch between block hashes. - BlockHashMismatch { - /// The `self` hash. - self_hash: BlockHash, - /// The `other` hash. - other_hash: BlockHash, - }, - /// A mismatch between era IDs. - EraIdMismatch { - /// The `self` era ID. - self_era_id: EraId, - /// The `other` era ID. - other_era_id: EraId, - }, -} - -impl Display for BlockSignaturesMergeError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - BlockSignaturesMergeError::BlockHashMismatch { - self_hash, - other_hash, - } => { - write!( - formatter, - "mismatch between block hashes while merging block signatures - self: {}, \ - other: {}", - self_hash, other_hash - ) - } - BlockSignaturesMergeError::EraIdMismatch { - self_era_id, - other_era_id, - } => { - write!( - formatter, - "mismatch between era ids while merging block signatures - self: {}, other: \ - {}", - self_era_id, other_era_id - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for BlockSignaturesMergeError {} - -/// A collection of signatures for a single block, along with the associated block's hash and era -/// ID. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct BlockSignatures { - /// The block hash. - pub(super) block_hash: BlockHash, - /// The era ID in which this block was created. - pub(super) era_id: EraId, - /// The proofs of the block, i.e. a collection of validators' signatures of the block hash. - pub(super) proofs: BTreeMap, -} - -impl BlockSignatures { - /// Constructs a new `BlockSignatures`. - pub fn new(block_hash: BlockHash, era_id: EraId) -> Self { - BlockSignatures { - block_hash, - era_id, - proofs: BTreeMap::new(), - } - } - - /// Returns the block hash of the associated block. - pub fn block_hash(&self) -> &BlockHash { - &self.block_hash - } - - /// Returns the era id of the associated block. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Returns the finality signature associated with the given public key, if available. 
- pub fn finality_signature(&self, public_key: &PublicKey) -> Option { - self.proofs - .get(public_key) - .map(|signature| FinalitySignature { - block_hash: self.block_hash, - era_id: self.era_id, - signature: *signature, - public_key: public_key.clone(), - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - }) - } - - /// Returns `true` if there is a signature associated with the given public key. - pub fn has_finality_signature(&self, public_key: &PublicKey) -> bool { - self.proofs.contains_key(public_key) - } - - /// Returns an iterator over all the signatures. - pub fn finality_signatures(&self) -> impl Iterator + '_ { - self.proofs - .iter() - .map(move |(public_key, signature)| FinalitySignature { - block_hash: self.block_hash, - era_id: self.era_id, - signature: *signature, - public_key: public_key.clone(), - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - }) - } - - /// Returns an iterator over all the validator public keys. - pub fn signers(&self) -> impl Iterator + '_ { - self.proofs.keys() - } - - /// Returns the number of signatures in the collection. - pub fn len(&self) -> usize { - self.proofs.len() - } - - /// Returns `true` if there are no signatures in the collection. - pub fn is_empty(&self) -> bool { - self.proofs.is_empty() - } - - /// Inserts a new signature. - pub fn insert_signature(&mut self, finality_signature: FinalitySignature) { - let _ = self - .proofs - .insert(finality_signature.public_key, finality_signature.signature); - } - - /// Merges the collection of signatures in `other` into `self`. - /// - /// Returns an error if the block hashes or era IDs do not match. - pub fn merge(&mut self, mut other: Self) -> Result<(), BlockSignaturesMergeError> { - if self.block_hash != other.block_hash { - return Err(BlockSignaturesMergeError::BlockHashMismatch { - self_hash: self.block_hash, - other_hash: other.block_hash, - }); - } - - if self.era_id != other.era_id { - return Err(BlockSignaturesMergeError::EraIdMismatch { - self_era_id: self.era_id, - other_era_id: other.era_id, - }); - } - - self.proofs.append(&mut other.proofs); - - Ok(()) - } - - /// Returns `Ok` if and only if all the signatures are cryptographically valid. 
- pub fn is_verified(&self) -> Result<(), crypto::Error> { - for (public_key, signature) in self.proofs.iter() { - let signature = FinalitySignature { - block_hash: self.block_hash, - era_id: self.era_id, - signature: *signature, - public_key: public_key.clone(), - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - }; - signature.is_verified()?; - } - Ok(()) - } -} - -impl FromBytes for BlockSignatures { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), crate::bytesrepr::Error> { - let (block_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (era_id, bytes) = FromBytes::from_bytes(bytes)?; - let (proofs, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - BlockSignatures { - block_hash, - era_id, - proofs, - }, - bytes, - )) - } -} - -impl ToBytes for BlockSignatures { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buf = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buf)?; - Ok(buf) - } - - fn write_bytes(&self, bytes: &mut Vec) -> Result<(), crate::bytesrepr::Error> { - self.block_hash.write_bytes(bytes)?; - self.era_id.write_bytes(bytes)?; - self.proofs.write_bytes(bytes)?; - Ok(()) - } - - fn serialized_length(&self) -> usize { - self.block_hash.serialized_length() - + self.era_id.serialized_length() - + self.proofs.serialized_length() - } -} - -impl Display for BlockSignatures { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "block signatures for {} in {} with {} proofs", - self.block_hash, - self.era_id, - self.proofs.len() - ) - } -} diff --git a/casper_types_ver_2_0/src/block/block_sync_status.rs b/casper_types_ver_2_0/src/block/block_sync_status.rs deleted file mode 100644 index 6c842824..00000000 --- a/casper_types_ver_2_0/src/block/block_sync_status.rs +++ /dev/null @@ -1,212 +0,0 @@ -use alloc::{string::String, vec::Vec}; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - BlockHash, -}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -#[cfg(feature = "json-schema")] -static BLOCK_SYNCHRONIZER_STATUS: Lazy = Lazy::new(|| { - use crate::Digest; - - BlockSynchronizerStatus::new( - Some(BlockSyncStatus { - block_hash: BlockHash::new( - Digest::from_hex( - "16ddf28e2b3d2e17f4cef36f8b58827eca917af225d139b0c77df3b4a67dc55e", - ) - .unwrap(), - ), - block_height: Some(40), - acquisition_state: "have strict finality(40) for: block hash 16dd..c55e".to_string(), - }), - Some(BlockSyncStatus { - block_hash: BlockHash::new( - Digest::from_hex( - "59907b1e32a9158169c4d89d9ce5ac9164fc31240bfcfb0969227ece06d74983", - ) - .unwrap(), - ), - block_height: Some(6701), - acquisition_state: "have block body(6701) for: block hash 5990..4983".to_string(), - }), - ) -}); - -/// The status of syncing an individual block. -#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct BlockSyncStatus { - /// The block hash. - block_hash: BlockHash, - /// The height of the block, if known. - block_height: Option, - /// The state of acquisition of the data associated with the block. - acquisition_state: String, -} - -impl BlockSyncStatus { - /// Constructs a new `BlockSyncStatus`. 
- pub fn new( - block_hash: BlockHash, - block_height: Option, - acquisition_state: String, - ) -> Self { - Self { - block_hash, - block_height, - acquisition_state, - } - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - block_hash: BlockHash::random(rng), - block_height: rng.gen::().then_some(rng.gen()), - acquisition_state: rng.random_string(10..20), - } - } -} - -impl ToBytes for BlockSyncStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.block_hash.write_bytes(writer)?; - self.block_height.write_bytes(writer)?; - self.acquisition_state.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.block_hash.serialized_length() - + self.block_height.serialized_length() - + self.acquisition_state.serialized_length() - } -} - -impl FromBytes for BlockSyncStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (block_hash, remainder) = BlockHash::from_bytes(bytes)?; - let (block_height, remainder) = Option::::from_bytes(remainder)?; - let (acquisition_state, remainder) = String::from_bytes(remainder)?; - Ok(( - BlockSyncStatus { - block_hash, - block_height, - acquisition_state, - }, - remainder, - )) - } -} - -/// The status of the block synchronizer. -#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct BlockSynchronizerStatus { - /// The status of syncing a historical block, if any. - historical: Option, - /// The status of syncing a forward block, if any. - forward: Option, -} - -impl BlockSynchronizerStatus { - /// Constructs a new `BlockSynchronizerStatus`. - pub fn new(historical: Option, forward: Option) -> Self { - Self { - historical, - forward, - } - } - - /// Returns an example `BlockSynchronizerStatus`. - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK_SYNCHRONIZER_STATUS - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - let historical = rng.gen::().then_some(BlockSyncStatus::random(rng)); - let forward = rng.gen::().then_some(BlockSyncStatus::random(rng)); - Self { - historical, - forward, - } - } - - /// Returns status of the historical block sync. - #[cfg(any(feature = "testing", test))] - pub fn historical(&self) -> &Option { - &self.historical - } - - /// Returns status of the forward block sync. 
- #[cfg(any(feature = "testing", test))] - pub fn forward(&self) -> &Option { - &self.forward - } -} - -impl ToBytes for BlockSynchronizerStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.historical.write_bytes(writer)?; - self.forward.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.historical.serialized_length() + self.forward.serialized_length() - } -} - -impl FromBytes for BlockSynchronizerStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (historical, remainder) = Option::::from_bytes(bytes)?; - let (forward, remainder) = Option::::from_bytes(remainder)?; - Ok(( - BlockSynchronizerStatus { - historical, - forward, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = BlockSyncStatus::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/block/block_v1.rs b/casper_types_ver_2_0/src/block/block_v1.rs deleted file mode 100644 index 9592be34..00000000 --- a/casper_types_ver_2_0/src/block/block_v1.rs +++ /dev/null @@ -1,367 +0,0 @@ -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use alloc::collections::BTreeMap; -use alloc::{boxed::Box, vec::Vec}; -use core::fmt::{self, Display, Formatter}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use core::iter; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use rand::Rng; - -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::U512; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Block, BlockBodyV1, BlockHash, BlockHeaderV1, BlockValidationError, DeployHash, Digest, - EraEndV1, EraId, ProtocolVersion, PublicKey, Timestamp, -}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::{testing::TestRng, EraReport}; - -/// A block after execution, with the resulting global state root hash. This is the core component -/// of the Casper linear blockchain. Version 1. -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[derive(Clone, Eq, PartialEq, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockV1 { - /// The block hash identifying this block. - pub(super) hash: BlockHash, - /// The header portion of the block. - pub(super) header: BlockHeaderV1, - /// The body portion of the block. - pub(super) body: BlockBodyV1, -} - -impl BlockV1 { - // This method is not intended to be used by third party crates. 
- #[doc(hidden)] - #[allow(clippy::too_many_arguments)] - pub fn new( - parent_hash: BlockHash, - parent_seed: Digest, - state_root_hash: Digest, - random_bit: bool, - era_end: Option, - timestamp: Timestamp, - era_id: EraId, - height: u64, - protocol_version: ProtocolVersion, - proposer: PublicKey, - deploy_hashes: Vec, - transfer_hashes: Vec, - ) -> Self { - let body = BlockBodyV1::new(proposer, deploy_hashes, transfer_hashes); - let body_hash = body.hash(); - let accumulated_seed = Digest::hash_pair(parent_seed, [random_bit as u8]); - let header = BlockHeaderV1::new( - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - OnceCell::new(), - ); - Self::new_from_header_and_body(header, body) - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - pub fn new_from_header_and_body(header: BlockHeaderV1, body: BlockBodyV1) -> Self { - let hash = header.block_hash(); - BlockV1 { hash, header, body } - } - - /// Returns the `BlockHash` identifying this block. - pub fn hash(&self) -> &BlockHash { - &self.hash - } - - /// Returns the block's header. - pub fn header(&self) -> &BlockHeaderV1 { - &self.header - } - - /// Returns the block's header, consuming `self`. - pub fn take_header(self) -> BlockHeaderV1 { - self.header - } - - /// Returns the block's body. - pub fn body(&self) -> &BlockBodyV1 { - &self.body - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - self.header.parent_hash() - } - - /// Returns the root hash of global state after the deploys in this block have been executed. - pub fn state_root_hash(&self) -> &Digest { - self.header.state_root_hash() - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - self.header.body_hash() - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - self.header.random_bit() - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - self.header.accumulated_seed() - } - - /// Returns the `EraEnd` of a block if it is a switch block. - pub fn era_end(&self) -> Option<&EraEndV1> { - self.header.era_end() - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - self.header.timestamp() - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - self.header.era_id() - } - - /// Returns the height of this block, i.e. the number of ancestors. - pub fn height(&self) -> u64 { - self.header.height() - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - self.header.protocol_version() - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - self.header.is_switch_block() - } - - /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. - pub fn is_genesis(&self) -> bool { - self.header.is_genesis() - } - - /// Returns the public key of the validator which proposed the block. - pub fn proposer(&self) -> &PublicKey { - self.body.proposer() - } - - /// Returns the deploy hashes within the block. - pub fn deploy_hashes(&self) -> &[DeployHash] { - self.body.deploy_hashes() - } - - /// Returns the transfer hashes within the block. 
- pub fn transfer_hashes(&self) -> &[DeployHash] { - self.body.transfer_hashes() - } - - /// Returns the deploy and transfer hashes in the order in which they were executed. - pub fn deploy_and_transfer_hashes(&self) -> impl Iterator { - self.deploy_hashes() - .iter() - .chain(self.transfer_hashes().iter()) - } - - /// Returns `Ok` if and only if the block's provided block hash and body hash are identical to - /// those generated by hashing the appropriate input data. - pub fn verify(&self) -> Result<(), BlockValidationError> { - let actual_block_header_hash = self.header().block_hash(); - if *self.hash() != actual_block_header_hash { - return Err(BlockValidationError::UnexpectedBlockHash { - block: Box::new(Block::V1(self.clone())), - actual_block_hash: actual_block_header_hash, - }); - } - - let actual_block_body_hash = self.body.hash(); - if *self.header.body_hash() != actual_block_body_hash { - return Err(BlockValidationError::UnexpectedBodyHash { - block: Box::new(Block::V1(self.clone())), - actual_block_body_hash, - }); - } - - Ok(()) - } - - /// Returns a random block, but using the provided values. - /// - /// If `deploy_hashes_iter` is empty, a few random deploy hashes will be added to the - /// `deploy_hashes` and `transfer_hashes` fields of the body. Otherwise, the provided deploy - /// hashes will populate the `deploy_hashes` field and `transfer_hashes` will be empty. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_specifics>( - rng: &mut TestRng, - era_id: EraId, - height: u64, - protocol_version: ProtocolVersion, - is_switch: bool, - deploy_hashes_iter: I, - ) -> Self { - let parent_hash = BlockHash::random(rng); - let parent_seed = Digest::random(rng); - let state_root_hash = Digest::random(rng); - let random_bit = rng.gen(); - let era_end = is_switch.then(|| { - let mut next_era_validator_weights = BTreeMap::new(); - for i in 1_u64..6 { - let _ = next_era_validator_weights.insert(PublicKey::random(rng), U512::from(i)); - } - EraEndV1::new(EraReport::random(rng), next_era_validator_weights) - }); - let timestamp = Timestamp::now(); - let proposer = PublicKey::random(rng); - let mut deploy_hashes: Vec = deploy_hashes_iter.into_iter().collect(); - let mut transfer_hashes: Vec = vec![]; - if deploy_hashes.is_empty() { - let count = rng.gen_range(0..6); - deploy_hashes = iter::repeat_with(|| DeployHash::random(rng)) - .take(count) - .collect(); - let count = rng.gen_range(0..6); - transfer_hashes = iter::repeat_with(|| DeployHash::random(rng)) - .take(count) - .collect(); - } - - BlockV1::new( - parent_hash, - parent_seed, - state_root_hash, - random_bit, - era_end, - timestamp, - era_id, - height, - protocol_version, - proposer, - deploy_hashes, - transfer_hashes, - ) - } -} - -impl Display for BlockV1 { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "executed block #{}, {}, timestamp {}, {}, parent {}, post-state hash {}, body hash \ - {}, random bit {}, protocol version: {}", - self.height(), - self.hash(), - self.timestamp(), - self.era_id(), - self.parent_hash().inner(), - self.state_root_hash(), - self.body_hash(), - self.random_bit(), - self.protocol_version() - )?; - if let Some(era_end) = self.era_end() { - write!(formatter, ", era_end: {}", era_end)?; - } - Ok(()) - } -} - -impl ToBytes for BlockV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.hash.write_bytes(writer)?; - self.header.write_bytes(writer)?; - self.body.write_bytes(writer) - } - - fn 
to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.hash.serialized_length() - + self.header.serialized_length() - + self.body.serialized_length() - } -} - -impl FromBytes for BlockV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (hash, remainder) = BlockHash::from_bytes(bytes)?; - let (header, remainder) = BlockHeaderV1::from_bytes(remainder)?; - let (body, remainder) = BlockBodyV1::from_bytes(remainder)?; - let block = BlockV1 { hash, header, body }; - Ok((block, remainder)) - } -} - -#[cfg(test)] -mod tests { - use crate::{Block, TestBlockV1Builder}; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let block = TestBlockV1Builder::new().build(rng); - bytesrepr::test_serialization_roundtrip(&block); - } - - #[test] - fn block_check_bad_body_hash_sad_path() { - let rng = &mut TestRng::new(); - - let mut block = TestBlockV1Builder::new().build(rng); - let bogus_block_body_hash = Digest::hash([0xde, 0xad, 0xbe, 0xef]); - block.header.set_body_hash(bogus_block_body_hash); - block.hash = block.header.block_hash(); - - let expected_error = BlockValidationError::UnexpectedBodyHash { - block: Box::new(Block::V1(block.clone())), - actual_block_body_hash: block.body.hash(), - }; - assert_eq!(block.verify(), Err(expected_error)); - } - - #[test] - fn block_check_bad_block_hash_sad_path() { - let rng = &mut TestRng::new(); - - let mut block = TestBlockV1Builder::new().build(rng); - let bogus_block_hash = BlockHash::from(Digest::hash([0xde, 0xad, 0xbe, 0xef])); - block.hash = bogus_block_hash; - - let expected_error = BlockValidationError::UnexpectedBlockHash { - block: Box::new(Block::V1(block.clone())), - actual_block_hash: block.header.block_hash(), - }; - assert_eq!(block.verify(), Err(expected_error)); - } -} diff --git a/casper_types_ver_2_0/src/block/block_v2.rs b/casper_types_ver_2_0/src/block/block_v2.rs deleted file mode 100644 index c80f9213..00000000 --- a/casper_types_ver_2_0/src/block/block_v2.rs +++ /dev/null @@ -1,411 +0,0 @@ -use alloc::{boxed::Box, vec::Vec}; - -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; - -use super::{Block, BlockBodyV2, BlockConversionError, RewardedSignatures}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -#[cfg(feature = "json-schema")] -use crate::TransactionV1Hash; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - BlockHash, BlockHeaderV2, BlockValidationError, Digest, EraEndV2, EraId, ProtocolVersion, - PublicKey, Timestamp, TransactionHash, -}; - -#[cfg(feature = "json-schema")] -static BLOCK_V2: Lazy = Lazy::new(|| { - let parent_hash = BlockHash::new(Digest::from([7; Digest::LENGTH])); - let parent_seed = Digest::from([9; Digest::LENGTH]); - let state_root_hash = Digest::from([8; Digest::LENGTH]); - let random_bit = true; - let era_end = Some(EraEndV2::example().clone()); - let timestamp = *Timestamp::example(); - let era_id = EraId::from(1); - let height = 10; - let protocol_version = ProtocolVersion::V1_0_0; - let secret_key = 
crate::SecretKey::example(); - let proposer = PublicKey::from(secret_key); - let transfer_hashes = vec![TransactionHash::V1(TransactionV1Hash::new(Digest::from( - [20; Digest::LENGTH], - )))]; - let non_transfer_native_hashes = vec![TransactionHash::V1(TransactionV1Hash::new( - Digest::from([21; Digest::LENGTH]), - ))]; - let installer_upgrader_hashes = vec![TransactionHash::V1(TransactionV1Hash::new( - Digest::from([22; Digest::LENGTH]), - ))]; - let other_hashes = vec![TransactionHash::V1(TransactionV1Hash::new(Digest::from( - [23; Digest::LENGTH], - )))]; - let rewarded_signatures = RewardedSignatures::default(); - BlockV2::new( - parent_hash, - parent_seed, - state_root_hash, - random_bit, - era_end, - timestamp, - era_id, - height, - protocol_version, - proposer, - transfer_hashes, - non_transfer_native_hashes, - installer_upgrader_hashes, - other_hashes, - rewarded_signatures, - ) -}); - -/// A block after execution, with the resulting global state root hash. This is the core component -/// of the Casper linear blockchain. Version 2. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct BlockV2 { - /// The block hash identifying this block. - pub(super) hash: BlockHash, - /// The header portion of the block. - pub(super) header: BlockHeaderV2, - /// The body portion of the block. - pub(super) body: BlockBodyV2, -} - -impl BlockV2 { - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[allow(clippy::too_many_arguments)] - pub fn new( - parent_hash: BlockHash, - parent_seed: Digest, - state_root_hash: Digest, - random_bit: bool, - era_end: Option, - timestamp: Timestamp, - era_id: EraId, - height: u64, - protocol_version: ProtocolVersion, - proposer: PublicKey, - transfer: Vec, - staking: Vec, - install_upgrade: Vec, - standard: Vec, - rewarded_signatures: RewardedSignatures, - ) -> Self { - let body = BlockBodyV2::new( - proposer, - transfer, - staking, - install_upgrade, - standard, - rewarded_signatures, - ); - let body_hash = body.hash(); - let accumulated_seed = Digest::hash_pair(parent_seed, [random_bit as u8]); - let header = BlockHeaderV2::new( - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id, - height, - protocol_version, - #[cfg(any(feature = "once_cell", test))] - OnceCell::new(), - ); - Self::new_from_header_and_body(header, body) - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - pub fn new_from_header_and_body(header: BlockHeaderV2, body: BlockBodyV2) -> Self { - let hash = header.block_hash(); - BlockV2 { hash, header, body } - } - - /// Returns the `BlockHash` identifying this block. - pub fn hash(&self) -> &BlockHash { - &self.hash - } - - /// Returns the block's header. - pub fn header(&self) -> &BlockHeaderV2 { - &self.header - } - - /// Returns the block's header, consuming `self`. - pub fn take_header(self) -> BlockHeaderV2 { - self.header - } - - /// Returns the block's body. - pub fn body(&self) -> &BlockBodyV2 { - &self.body - } - - /// Returns the parent block's hash. - pub fn parent_hash(&self) -> &BlockHash { - self.header.parent_hash() - } - - /// Returns the root hash of global state after the deploys in this block have been executed. 
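// A usage sketch, not part of the removed file, for the four transaction categories that
// `BlockV2::new` above accepts (transfer, staking, install/upgrade, standard); the accessors
// defined below expose them, and `all_transactions()` chains them in execution order.
// `block` is hypothetical and the import path is illustrative.
use casper_types::BlockV2;

fn summarize(block: &BlockV2) {
    println!(
        "transfer: {}, staking: {}, install/upgrade: {}, standard: {}, total: {}",
        block.transfer().count(),
        block.staking().count(),
        block.install_upgrade().count(),
        block.standard().count(),
        block.all_transactions().count(),
    );
}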
- pub fn state_root_hash(&self) -> &Digest { - self.header.state_root_hash() - } - - /// Returns the hash of the block's body. - pub fn body_hash(&self) -> &Digest { - self.header.body_hash() - } - - /// Returns a random bit needed for initializing a future era. - pub fn random_bit(&self) -> bool { - self.header.random_bit() - } - - /// Returns a seed needed for initializing a future era. - pub fn accumulated_seed(&self) -> &Digest { - self.header.accumulated_seed() - } - - /// Returns the `EraEnd` of a block if it is a switch block. - pub fn era_end(&self) -> Option<&EraEndV2> { - self.header.era_end() - } - - /// Returns the timestamp from when the block was proposed. - pub fn timestamp(&self) -> Timestamp { - self.header.timestamp() - } - - /// Returns the era ID in which this block was created. - pub fn era_id(&self) -> EraId { - self.header.era_id() - } - - /// Returns the height of this block, i.e. the number of ancestors. - pub fn height(&self) -> u64 { - self.header.height() - } - - /// Returns the protocol version of the network from when this block was created. - pub fn protocol_version(&self) -> ProtocolVersion { - self.header.protocol_version() - } - - /// Returns `true` if this block is the last one in the current era. - pub fn is_switch_block(&self) -> bool { - self.header.is_switch_block() - } - - /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. - pub fn is_genesis(&self) -> bool { - self.header.is_genesis() - } - - /// Returns the public key of the validator which proposed the block. - pub fn proposer(&self) -> &PublicKey { - self.body.proposer() - } - - /// List of identifiers for finality signatures for a particular past block. - pub fn rewarded_signatures(&self) -> &RewardedSignatures { - self.body.rewarded_signatures() - } - - /// Returns the hashes of the transfer transactions within the block. - pub fn transfer(&self) -> impl Iterator { - self.body.transfer() - } - - /// Returns the hashes of the non-transfer, native transactions within the block. - pub fn staking(&self) -> impl Iterator { - self.body.staking() - } - - /// Returns the hashes of the installer/upgrader transactions within the block. - pub fn install_upgrade(&self) -> impl Iterator { - self.body.install_upgrade() - } - - /// Returns the hashes of all other transactions within the block. - pub fn standard(&self) -> impl Iterator { - self.body.standard() - } - - /// Returns all of the transaction hashes in the order in which they were executed. - pub fn all_transactions(&self) -> impl Iterator { - self.body.all_transactions() - } - - /// Returns `Ok` if and only if the block's provided block hash and body hash are identical to - /// those generated by hashing the appropriate input data. - pub fn verify(&self) -> Result<(), BlockValidationError> { - let actual_block_header_hash = self.header().block_hash(); - if *self.hash() != actual_block_header_hash { - return Err(BlockValidationError::UnexpectedBlockHash { - block: Box::new(Block::V2(self.clone())), - actual_block_hash: actual_block_header_hash, - }); - } - - let actual_block_body_hash = self.body.hash(); - if *self.header.body_hash() != actual_block_body_hash { - return Err(BlockValidationError::UnexpectedBodyHash { - block: Box::new(Block::V2(self.clone())), - actual_block_body_hash, - }); - } - - Ok(()) - } - - // This method is not intended to be used by third party crates. 
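// A sketch, not part of the removed file, of handling the two failure cases that
// `BlockV2::verify` above can report. `block` is hypothetical; only the error variants
// defined in this diff are assumed, and the import paths are illustrative.
use casper_types::{BlockV2, BlockValidationError};

fn check(block: &BlockV2) {
    match block.verify() {
        Ok(()) => println!("block hash and body hash are consistent"),
        Err(BlockValidationError::UnexpectedBlockHash { actual_block_hash, .. }) => {
            eprintln!("stored block hash differs from header hash {}", actual_block_hash)
        }
        Err(BlockValidationError::UnexpectedBodyHash { actual_block_body_hash, .. }) => {
            eprintln!("header body hash differs from body hash {}", actual_block_body_hash)
        }
        Err(other) => eprintln!("other validation error: {:?}", other),
    }
}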
- #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &BLOCK_V2 - } - - /// Makes the block invalid, for testing purpose. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn make_invalid(self, rng: &mut TestRng) -> Self { - let block = BlockV2 { - hash: BlockHash::random(rng), - ..self - }; - - assert!(block.verify().is_err()); - block - } -} - -impl Display for BlockV2 { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "executed block #{}, {}, timestamp {}, {}, parent {}, post-state hash {}, body hash \ - {}, random bit {}, protocol version: {}", - self.height(), - self.hash(), - self.timestamp(), - self.era_id(), - self.parent_hash().inner(), - self.state_root_hash(), - self.body_hash(), - self.random_bit(), - self.protocol_version() - )?; - if let Some(era_end) = self.era_end() { - write!(formatter, ", era_end: {}", era_end)?; - } - Ok(()) - } -} - -impl ToBytes for BlockV2 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.hash.write_bytes(writer)?; - self.header.write_bytes(writer)?; - self.body.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.hash.serialized_length() - + self.header.serialized_length() - + self.body.serialized_length() - } -} - -impl FromBytes for BlockV2 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (hash, remainder) = BlockHash::from_bytes(bytes)?; - let (header, remainder) = BlockHeaderV2::from_bytes(remainder)?; - let (body, remainder) = BlockBodyV2::from_bytes(remainder)?; - let block = BlockV2 { hash, header, body }; - Ok((block, remainder)) - } -} - -impl TryFrom for BlockV2 { - type Error = BlockConversionError; - - fn try_from(value: Block) -> Result { - match value { - Block::V2(v2) => Ok(v2), - _ => Err(BlockConversionError::DifferentVersion { - expected_version: 2, - }), - } - } -} - -#[cfg(test)] -mod tests { - use crate::TestBlockBuilder; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let block = TestBlockBuilder::new().build(rng); - bytesrepr::test_serialization_roundtrip(&block); - } - - #[test] - fn block_check_bad_body_hash_sad_path() { - let rng = &mut TestRng::new(); - - let mut block = TestBlockBuilder::new().build(rng); - let bogus_block_body_hash = Digest::hash([0xde, 0xad, 0xbe, 0xef]); - block.header.set_body_hash(bogus_block_body_hash); - block.hash = block.header.block_hash(); - - let expected_error = BlockValidationError::UnexpectedBodyHash { - block: Box::new(Block::V2(block.clone())), - actual_block_body_hash: block.body.hash(), - }; - assert_eq!(block.verify(), Err(expected_error)); - } - - #[test] - fn block_check_bad_block_hash_sad_path() { - let rng = &mut TestRng::new(); - - let mut block = TestBlockBuilder::new().build(rng); - let bogus_block_hash = BlockHash::from(Digest::hash([0xde, 0xad, 0xbe, 0xef])); - block.hash = bogus_block_hash; - - let expected_error = BlockValidationError::UnexpectedBlockHash { - block: Box::new(Block::V2(block.clone())), - actual_block_hash: block.header.block_hash(), - }; - assert_eq!(block.verify(), Err(expected_error)); - } -} diff --git a/casper_types_ver_2_0/src/block/era_end.rs b/casper_types_ver_2_0/src/block/era_end.rs deleted file mode 100644 index 0dcc8813..00000000 --- 
a/casper_types_ver_2_0/src/block/era_end.rs +++ /dev/null @@ -1,133 +0,0 @@ -mod era_end_v1; -mod era_end_v2; - -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - PublicKey, Rewards, U512, -}; -pub use era_end_v1::{EraEndV1, EraReport}; -pub use era_end_v2::EraEndV2; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for block body v1. -pub const ERA_END_V1_TAG: u8 = 0; -/// Tag for block body v2. -pub const ERA_END_V2_TAG: u8 = 1; - -/// The versioned era end of a block, storing the data for a switch block. -/// It encapsulates different variants of the EraEnd struct. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(any(feature = "testing", test), derive(PartialEq))] -#[derive(Clone, Hash, Serialize, Deserialize, Debug)] -pub enum EraEnd { - /// The legacy, initial version of the body portion of a block. - V1(EraEndV1), - /// The version 2 of the body portion of a block, which includes the - /// `past_finality_signatures`. - V2(EraEndV2), -} - -impl EraEnd { - /// Retrieves the deploy hashes within the block. - pub fn equivocators(&self) -> &[PublicKey] { - match self { - EraEnd::V1(v1) => v1.equivocators(), - EraEnd::V2(v2) => v2.equivocators(), - } - } - - /// Retrieves the transfer hashes within the block. - pub fn inactive_validators(&self) -> &[PublicKey] { - match self { - EraEnd::V1(v1) => v1.inactive_validators(), - EraEnd::V2(v2) => v2.inactive_validators(), - } - } - - /// Returns the deploy and transfer hashes in the order in which they were executed. - pub fn next_era_validator_weights(&self) -> &BTreeMap { - match self { - EraEnd::V1(v1) => v1.next_era_validator_weights(), - EraEnd::V2(v2) => v2.next_era_validator_weights(), - } - } - - /// Returns the deploy and transfer hashes in the order in which they were executed. 
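// A usage sketch, not part of the removed file, for the version-agnostic `EraEnd` accessors
// above: summing the next era's validator weights without caring whether the data came from
// a V1 or a V2 switch block. Import paths are illustrative.
use casper_types::{EraEnd, U512};

fn total_next_era_stake(era_end: &EraEnd) -> U512 {
    era_end
        .next_era_validator_weights()
        .values()
        .fold(U512::zero(), |total, weight| total + *weight)
}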
- pub fn rewards(&self) -> Rewards { - match self { - EraEnd::V1(v1) => Rewards::V1(v1.rewards()), - EraEnd::V2(v2) => Rewards::V2(v2.rewards()), - } - } -} - -impl Display for EraEnd { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - EraEnd::V1(v1) => Display::fmt(&v1, formatter), - EraEnd::V2(v2) => Display::fmt(&v2, formatter), - } - } -} - -impl From for EraEnd { - fn from(era_end: EraEndV1) -> Self { - EraEnd::V1(era_end) - } -} - -impl From for EraEnd { - fn from(era_end: EraEndV2) -> Self { - EraEnd::V2(era_end) - } -} - -impl ToBytes for EraEnd { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - EraEnd::V1(v1) => { - buffer.insert(0, ERA_END_V1_TAG); - buffer.extend(v1.to_bytes()?); - } - EraEnd::V2(v2) => { - buffer.insert(0, ERA_END_V2_TAG); - buffer.extend(v2.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - EraEnd::V1(v1) => v1.serialized_length(), - EraEnd::V2(v2) => v2.serialized_length(), - } - } -} - -impl FromBytes for EraEnd { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - ERA_END_V1_TAG => { - let (body, remainder): (EraEndV1, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V1(body), remainder)) - } - ERA_END_V2_TAG => { - let (body, remainder): (EraEndV2, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::V2(body), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} diff --git a/casper_types_ver_2_0/src/block/era_end/era_end_v1.rs b/casper_types_ver_2_0/src/block/era_end/era_end_v1.rs deleted file mode 100644 index ac89e7f3..00000000 --- a/casper_types_ver_2_0/src/block/era_end/era_end_v1.rs +++ /dev/null @@ -1,163 +0,0 @@ -mod era_report; - -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -#[cfg(feature = "json-schema")] -use crate::SecretKey; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - PublicKey, U512, -}; -pub use era_report::EraReport; - -#[cfg(feature = "json-schema")] -static ERA_END_V1: Lazy = Lazy::new(|| { - let secret_key_1 = SecretKey::ed25519_from_bytes([0; 32]).unwrap(); - let public_key_1 = PublicKey::from(&secret_key_1); - let next_era_validator_weights = { - let mut next_era_validator_weights: BTreeMap = BTreeMap::new(); - next_era_validator_weights.insert(public_key_1, U512::from(123)); - next_era_validator_weights.insert( - PublicKey::from( - &SecretKey::ed25519_from_bytes([5u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - U512::from(456), - ); - next_era_validator_weights.insert( - PublicKey::from( - &SecretKey::ed25519_from_bytes([6u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - U512::from(789), - ); - next_era_validator_weights - }; - - let era_report = EraReport::example().clone(); - EraEndV1::new(era_report, next_era_validator_weights) -}); - -/// Information related to the end of an era, and validator weights for the following era. 
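// A minimal sketch, not part of the removed file, of the one-byte version-tag framing that
// the `EraEnd` `ToBytes`/`FromBytes` impls above use: write the tag, then the payload; read
// the tag, then dispatch. `Sample` is a hypothetical enum; only the bytesrepr items already
// shown in this diff are assumed.
use casper_types::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH};

const V1_TAG: u8 = 0;
const V2_TAG: u8 = 1;

enum Sample {
    V1(u32),
    V2(u64),
}

impl ToBytes for Sample {
    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
        let mut buffer = bytesrepr::allocate_buffer(self)?;
        match self {
            Sample::V1(inner) => {
                buffer.push(V1_TAG);
                buffer.extend(inner.to_bytes()?);
            }
            Sample::V2(inner) => {
                buffer.push(V2_TAG);
                buffer.extend(inner.to_bytes()?);
            }
        }
        Ok(buffer)
    }

    fn serialized_length(&self) -> usize {
        U8_SERIALIZED_LENGTH
            + match self {
                Sample::V1(inner) => inner.serialized_length(),
                Sample::V2(inner) => inner.serialized_length(),
            }
    }
}

impl FromBytes for Sample {
    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
        let (tag, remainder) = u8::from_bytes(bytes)?;
        match tag {
            V1_TAG => {
                let (inner, remainder) = u32::from_bytes(remainder)?;
                Ok((Sample::V1(inner), remainder))
            }
            V2_TAG => {
                let (inner, remainder) = u64::from_bytes(remainder)?;
                Ok((Sample::V2(inner), remainder))
            }
            _ => Err(bytesrepr::Error::Formatting),
        }
    }
}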
-#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct EraEndV1 { - /// Equivocation, reward and validator inactivity information. - pub(super) era_report: EraReport, - /// The validators for the upcoming era and their respective weights. - #[serde(with = "BTreeMapToArray::")] - pub(super) next_era_validator_weights: BTreeMap, -} - -impl EraEndV1 { - /// Returns equivocation, reward and validator inactivity information. - pub fn era_report(&self) -> &EraReport { - &self.era_report - } - - /// Retrieves the deploy hashes within the block. - pub fn equivocators(&self) -> &[PublicKey] { - self.era_report.equivocators() - } - - /// Retrieves the transfer hashes within the block. - pub fn inactive_validators(&self) -> &[PublicKey] { - self.era_report.inactive_validators() - } - - /// Retrieves the transfer hashes within the block. - pub fn rewards(&self) -> &BTreeMap { - self.era_report.rewards() - } - - /// Returns the validators for the upcoming era and their respective weights. - pub fn next_era_validator_weights(&self) -> &BTreeMap { - &self.next_era_validator_weights - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - pub fn new( - era_report: EraReport, - next_era_validator_weights: BTreeMap, - ) -> Self { - EraEndV1 { - era_report, - next_era_validator_weights, - } - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ERA_END_V1 - } -} - -impl ToBytes for EraEndV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.era_report.write_bytes(writer)?; - self.next_era_validator_weights.write_bytes(writer)?; - - Ok(()) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.era_report.serialized_length() + self.next_era_validator_weights.serialized_length() - } -} - -impl FromBytes for EraEndV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (era_report, remainder) = EraReport::::from_bytes(bytes)?; - let (next_era_validator_weights, remainder) = - BTreeMap::::from_bytes(remainder)?; - let era_end = EraEndV1 { - era_report, - next_era_validator_weights, - }; - Ok((era_end, remainder)) - } -} - -impl Display for EraEndV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "era end: {} ", self.era_report) - } -} - -struct NextEraValidatorLabels; - -impl KeyValueLabels for NextEraValidatorLabels { - const KEY: &'static str = "validator"; - const VALUE: &'static str = "weight"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for NextEraValidatorLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("ValidatorWeight"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some( - "A validator's public key paired with its weight, i.e. 
the total number of \ - motes staked by it and its delegators.", - ); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = Some("The validator's public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The validator's weight."); -} diff --git a/casper_types_ver_2_0/src/block/era_end/era_end_v1/era_report.rs b/casper_types_ver_2_0/src/block/era_end/era_end_v1/era_report.rs deleted file mode 100644 index af63359e..00000000 --- a/casper_types_ver_2_0/src/block/era_end/era_end_v1/era_report.rs +++ /dev/null @@ -1,252 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; -#[cfg(any(feature = "testing", test))] -use core::iter; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -#[cfg(feature = "json-schema")] -use crate::SecretKey; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, DisplayIter, PublicKey, -}; - -#[cfg(feature = "json-schema")] -static ERA_REPORT: Lazy> = Lazy::new(|| { - let secret_key_1 = SecretKey::ed25519_from_bytes([0; 32]).unwrap(); - let public_key_1 = PublicKey::from(&secret_key_1); - let equivocators = vec![public_key_1]; - - let secret_key_3 = SecretKey::ed25519_from_bytes([2; 32]).unwrap(); - let public_key_3 = PublicKey::from(&secret_key_3); - let inactive_validators = vec![public_key_3]; - - let rewards = BTreeMap::new(); - - EraReport { - equivocators, - rewards, - inactive_validators, - } -}); - -/// Equivocation, reward and validator inactivity information. -/// -/// `VID` represents validator ID type, generally [`PublicKey`]. -#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(bound( - serialize = "VID: Ord + Serialize", - deserialize = "VID: Ord + Deserialize<'de>", -))] -#[cfg_attr( - feature = "json-schema", - schemars(description = "Equivocation, reward and validator inactivity information.") -)] -pub struct EraReport { - /// The set of equivocators. - pub(super) equivocators: Vec, - /// Rewards for finalization of earlier blocks. - #[serde(with = "BTreeMapToArray::")] - pub(super) rewards: BTreeMap, - /// Validators that haven't produced any unit during the era. - pub(super) inactive_validators: Vec, -} - -impl EraReport { - /// Constructs a new `EraReport`. - pub fn new( - equivocators: Vec, - rewards: BTreeMap, - inactive_validators: Vec, - ) -> Self { - EraReport { - equivocators, - rewards, - inactive_validators, - } - } - - /// Returns the set of equivocators. - pub fn equivocators(&self) -> &[VID] { - &self.equivocators - } - - /// Returns rewards for finalization of earlier blocks. - /// - /// This is a measure of the value of each validator's contribution to consensus, in - /// fractions of the configured maximum block reward. - pub fn rewards(&self) -> &BTreeMap { - &self.rewards - } - - /// Returns validators that haven't produced any unit during the era. - pub fn inactive_validators(&self) -> &[VID] { - &self.inactive_validators - } - - /// Returns a cryptographic hash of the `EraReport`. 
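// A sketch, not part of the removed file, of the JSON shape that the `BTreeMapToArray`
// serde attribute used on the validator-weight and reward maps above is intended to produce:
// an array of labelled key/value objects rather than a JSON map. The types here are
// simplified stand-ins (String keys, u64 values), the labels struct mirrors the ones in this
// file, and the printed output should be treated as illustrative.
use std::collections::BTreeMap;

use serde::{Deserialize, Serialize};
use serde_map_to_array::{BTreeMapToArray, KeyValueLabels};

struct WeightLabels;

impl KeyValueLabels for WeightLabels {
    const KEY: &'static str = "validator";
    const VALUE: &'static str = "weight";
}

#[derive(Serialize, Deserialize)]
struct EraEndLike {
    #[serde(with = "BTreeMapToArray::<String, u64, WeightLabels>")]
    next_era_validator_weights: BTreeMap<String, u64>,
}

fn main() {
    let mut weights = BTreeMap::new();
    weights.insert("validator-1".to_string(), 123_u64);
    let era_end = EraEndLike { next_era_validator_weights: weights };
    // Expected to print something like:
    // {"next_era_validator_weights":[{"validator":"validator-1","weight":123}]}
    println!("{}", serde_json::to_string(&era_end).unwrap());
}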
- pub fn hash(&self) -> Digest - where - VID: ToBytes, - { - // Helper function to hash slice of validators - fn hash_slice_of_validators(slice_of_validators: &[VID]) -> Digest - where - VID: ToBytes, - { - Digest::hash_merkle_tree(slice_of_validators.iter().map(|validator| { - Digest::hash(validator.to_bytes().expect("Could not serialize validator")) - })) - } - - // Pattern match here leverages compiler to ensure every field is accounted for - let EraReport { - equivocators, - inactive_validators, - rewards, - } = self; - - let hashed_equivocators = hash_slice_of_validators(equivocators); - let hashed_inactive_validators = hash_slice_of_validators(inactive_validators); - let hashed_rewards = Digest::hash_btree_map(rewards).expect("Could not hash rewards"); - - Digest::hash_slice_rfold(&[ - hashed_equivocators, - hashed_rewards, - hashed_inactive_validators, - ]) - } -} - -impl Default for EraReport { - fn default() -> Self { - EraReport { - equivocators: vec![], - rewards: BTreeMap::new(), - inactive_validators: vec![], - } - } -} - -impl Display for EraReport { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - let slashings = DisplayIter::new(&self.equivocators); - let rewards = DisplayIter::new( - self.rewards - .iter() - .map(|(public_key, amount)| format!("{}: {}", public_key, amount)), - ); - write!(f, "era end: slash {}, reward {}", slashings, rewards) - } -} - -impl ToBytes for EraReport { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.equivocators.write_bytes(writer)?; - self.rewards.write_bytes(writer)?; - self.inactive_validators.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.equivocators.serialized_length() - + self.rewards.serialized_length() - + self.inactive_validators.serialized_length() - } -} - -impl FromBytes for EraReport { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (equivocators, remainder) = Vec::::from_bytes(bytes)?; - let (rewards, remainder) = BTreeMap::::from_bytes(remainder)?; - let (inactive_validators, remainder) = Vec::::from_bytes(remainder)?; - let era_report = EraReport { - equivocators, - rewards, - inactive_validators, - }; - Ok((era_report, remainder)) - } -} - -impl EraReport { - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ERA_REPORT - } - - /// Returns a random `EraReport`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - use rand::Rng; - - let equivocators_count = rng.gen_range(0..5); - let rewards_count = rng.gen_range(0..5); - let inactive_count = rng.gen_range(0..5); - let equivocators = iter::repeat_with(|| PublicKey::random(rng)) - .take(equivocators_count) - .collect(); - let rewards = iter::repeat_with(|| { - let pub_key = PublicKey::random(rng); - let reward = rng.gen_range(1..(1_000_000_000 + 1)); - (pub_key, reward) - }) - .take(rewards_count) - .collect(); - let inactive_validators = iter::repeat_with(|| PublicKey::random(rng)) - .take(inactive_count) - .collect(); - EraReport::new(equivocators, rewards, inactive_validators) - } -} - -struct EraRewardsLabels; - -impl KeyValueLabels for EraRewardsLabels { - const KEY: &'static str = "validator"; - const VALUE: &'static str = "amount"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for EraRewardsLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("EraReward"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some( - "A validator's public key paired with a measure of the value of its \ - contribution to consensus, as a fraction of the configured maximum block reward.", - ); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = Some("The validator's public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The reward amount."); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let era_report = EraReport::random(rng); - bytesrepr::test_serialization_roundtrip(&era_report); - } -} diff --git a/casper_types_ver_2_0/src/block/era_end/era_end_v2.rs b/casper_types_ver_2_0/src/block/era_end/era_end_v2.rs deleted file mode 100644 index 2b7fe163..00000000 --- a/casper_types_ver_2_0/src/block/era_end/era_end_v2.rs +++ /dev/null @@ -1,249 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -#[cfg(feature = "json-schema")] -use crate::SecretKey; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - DisplayIter, PublicKey, U512, -}; - -#[cfg(feature = "json-schema")] -static ERA_END_V2: Lazy = Lazy::new(|| { - let secret_key_1 = SecretKey::ed25519_from_bytes([0; 32]).unwrap(); - let public_key_1 = PublicKey::from(&secret_key_1); - let secret_key_3 = SecretKey::ed25519_from_bytes([2; 32]).unwrap(); - let public_key_3 = PublicKey::from(&secret_key_3); - - let equivocators = vec![public_key_1.clone()]; - let inactive_validators = vec![public_key_3]; - let next_era_validator_weights = { - let mut next_era_validator_weights: BTreeMap = BTreeMap::new(); - next_era_validator_weights.insert(public_key_1, U512::from(123)); - next_era_validator_weights.insert( - PublicKey::from( - &SecretKey::ed25519_from_bytes([5u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - U512::from(456), - ); - next_era_validator_weights.insert( - PublicKey::from( - &SecretKey::ed25519_from_bytes([6u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - U512::from(789), - ); - next_era_validator_weights - }; - let rewards = Default::default(); - - EraEndV2::new( - equivocators, - inactive_validators, - 
next_era_validator_weights, - rewards, - ) -}); - -/// Information related to the end of an era, and validator weights for the following era. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct EraEndV2 { - /// The set of equivocators. - pub(super) equivocators: Vec, - /// Validators that haven't produced any unit during the era. - pub(super) inactive_validators: Vec, - /// The validators for the upcoming era and their respective weights. - #[serde(with = "BTreeMapToArray::")] - pub(super) next_era_validator_weights: BTreeMap, - /// The rewards distributed to the validators. - pub(super) rewards: BTreeMap, -} - -impl EraEndV2 { - /// Returns the set of equivocators. - pub fn equivocators(&self) -> &[PublicKey] { - &self.equivocators - } - - /// Returns the validators that haven't produced any unit during the era. - pub fn inactive_validators(&self) -> &[PublicKey] { - &self.inactive_validators - } - - /// Returns the validators for the upcoming era and their respective weights. - pub fn next_era_validator_weights(&self) -> &BTreeMap { - &self.next_era_validator_weights - } - - /// Returns the rewards distributed to the validators. - pub fn rewards(&self) -> &BTreeMap { - &self.rewards - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - pub fn new( - equivocators: Vec, - inactive_validators: Vec, - next_era_validator_weights: BTreeMap, - rewards: BTreeMap, - ) -> Self { - EraEndV2 { - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - } - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ERA_END_V2 - } - - /// Returns a random `EraReport`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut crate::testing::TestRng) -> Self { - use rand::Rng; - - let equivocators_count = rng.gen_range(0..5); - let inactive_count = rng.gen_range(0..5); - let next_era_validator_weights_count = rng.gen_range(0..5); - let rewards_count = rng.gen_range(0..5); - - let equivocators = core::iter::repeat_with(|| PublicKey::random(rng)) - .take(equivocators_count) - .collect(); - - let inactive_validators = core::iter::repeat_with(|| PublicKey::random(rng)) - .take(inactive_count) - .collect(); - - let next_era_validator_weights = core::iter::repeat_with(|| { - let pub_key = PublicKey::random(rng); - let reward = rng.gen_range(1..=1_000_000_000); - (pub_key, U512::from(reward)) - }) - .take(next_era_validator_weights_count) - .collect(); - - let rewards = core::iter::repeat_with(|| { - let pub_key = PublicKey::random(rng); - let reward = rng.gen_range(1..=1_000_000_000); - (pub_key, U512::from(reward)) - }) - .take(rewards_count) - .collect(); - - Self::new( - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - ) - } -} - -impl ToBytes for EraEndV2 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let EraEndV2 { - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - } = self; - - equivocators.write_bytes(writer)?; - inactive_validators.write_bytes(writer)?; - next_era_validator_weights.write_bytes(writer)?; - rewards.write_bytes(writer)?; - - Ok(()) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - let EraEndV2 { - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - } = self; - - equivocators.serialized_length() - + inactive_validators.serialized_length() - + next_era_validator_weights.serialized_length() - + rewards.serialized_length() - } -} - -impl FromBytes for EraEndV2 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (equivocators, bytes) = Vec::from_bytes(bytes)?; - let (inactive_validators, bytes) = Vec::from_bytes(bytes)?; - let (next_era_validator_weights, bytes) = BTreeMap::from_bytes(bytes)?; - let (rewards, bytes) = BTreeMap::from_bytes(bytes)?; - let era_end = EraEndV2 { - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - }; - - Ok((era_end, bytes)) - } -} - -impl fmt::Display for EraEndV2 { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - let slashings = DisplayIter::new(&self.equivocators); - let rewards = DisplayIter::new( - self.rewards - .iter() - .map(|(public_key, amount)| format!("{}: {}", public_key, amount)), - ); - - write!( - formatter, - "era end: slash {}, reward {}", - slashings, rewards - ) - } -} - -struct NextEraValidatorLabels; - -impl KeyValueLabels for NextEraValidatorLabels { - const KEY: &'static str = "validator"; - const VALUE: &'static str = "weight"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for NextEraValidatorLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("ValidatorWeight"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some( - "A validator's public key paired with its weight, i.e. 
the total number of \ - motes staked by it and its delegators.", - ); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = Some("The validator's public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The validator's weight."); -} diff --git a/casper_types_ver_2_0/src/block/finality_signature.rs b/casper_types_ver_2_0/src/block/finality_signature.rs deleted file mode 100644 index 57b1c2a6..00000000 --- a/casper_types_ver_2_0/src/block/finality_signature.rs +++ /dev/null @@ -1,266 +0,0 @@ -use alloc::vec::Vec; -use core::{ - cmp::Ordering, - fmt::{self, Display, Formatter}, - hash::{Hash, Hasher}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::BlockHash; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{crypto, EraId, PublicKey, SecretKey, Signature}; - -/// A validator's signature of a block, confirming it is finalized. -/// -/// Clients and joining nodes should wait until the signers' combined weight exceeds the fault -/// tolerance threshold before accepting the block as finalized. -#[derive(Clone, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "A validator's signature of a block, confirming it is finalized.") -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct FinalitySignature { - /// The block hash of the associated block. - pub(super) block_hash: BlockHash, - /// The era in which the associated block was created. - pub(super) era_id: EraId, - /// The signature over the block hash of the associated block. - pub(super) signature: Signature, - /// The public key of the signing validator. - pub(super) public_key: PublicKey, - #[serde(skip)] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - pub(super) is_verified: OnceCell>, -} - -impl FinalitySignature { - /// Constructs a new `FinalitySignature`. - pub fn create(block_hash: BlockHash, era_id: EraId, secret_key: &SecretKey) -> Self { - let bytes = Self::bytes_to_sign(&block_hash, era_id); - let public_key = PublicKey::from(secret_key); - let signature = crypto::sign(bytes, secret_key, &public_key); - FinalitySignature { - block_hash, - era_id, - signature, - public_key, - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::with_value(Ok(())), - } - } - - /// Returns the block hash of the associated block. - pub fn block_hash(&self) -> &BlockHash { - &self.block_hash - } - - /// Returns the era in which the associated block was created. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Returns the signature over the block hash of the associated block. - pub fn signature(&self) -> &Signature { - &self.signature - } - - /// Returns the public key of the signing validator. - pub fn public_key(&self) -> &PublicKey { - &self.public_key - } - - /// Returns `Ok` if the signature is cryptographically valid. - pub fn is_verified(&self) -> Result<(), crypto::Error> { - #[cfg(any(feature = "once_cell", test))] - return self.is_verified.get_or_init(|| self.verify()).clone(); - - #[cfg(not(any(feature = "once_cell", test)))] - self.verify() - } - - /// Constructs a new `FinalitySignature`. 
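// A usage sketch, not part of the removed file, for `FinalitySignature::create` and
// `is_verified` above: the signature covers both the block hash and the era id, so changing
// either invalidates it. `block_hash` and `secret_key` are hypothetical inputs and the
// import paths are illustrative.
use casper_types::{BlockHash, EraId, FinalitySignature, PublicKey, SecretKey};

fn sign_and_check(block_hash: BlockHash, secret_key: &SecretKey) {
    let era_id = EraId::from(42);
    let finality_signature = FinalitySignature::create(block_hash, era_id, secret_key);
    assert!(finality_signature.is_verified().is_ok());
    assert_eq!(*finality_signature.public_key(), PublicKey::from(secret_key));
}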
- #[cfg(any(feature = "testing", test))] - pub fn new( - block_hash: BlockHash, - era_id: EraId, - signature: Signature, - public_key: PublicKey, - ) -> Self { - FinalitySignature { - block_hash, - era_id, - signature, - public_key, - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - } - } - - /// Returns a random `FinalitySignature`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - FinalitySignature::random_for_block(BlockHash::random(rng), EraId::random(rng), rng) - } - - /// Returns a random `FinalitySignature` for the provided `block_hash` and `era_id`. - #[cfg(any(feature = "testing", test))] - pub fn random_for_block(block_hash: BlockHash, era_id: EraId, rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random(rng); - FinalitySignature::create(block_hash, era_id, &secret_key) - } - - fn bytes_to_sign(block_hash: &BlockHash, era_id: EraId) -> Vec { - let mut bytes = block_hash.inner().into_vec(); - bytes.extend_from_slice(&era_id.to_le_bytes()); - bytes - } - - fn verify(&self) -> Result<(), crypto::Error> { - let bytes = Self::bytes_to_sign(&self.block_hash, self.era_id); - crypto::verify(bytes, &self.signature, &self.public_key) - } -} - -impl Hash for FinalitySignature { - fn hash(&self, state: &mut H) { - // Ensure we initialize self.is_verified field. - let is_verified = self.is_verified().is_ok(); - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - is_verified: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - } = self; - block_hash.hash(state); - era_id.hash(state); - signature.hash(state); - public_key.hash(state); - is_verified.hash(state); - } -} - -impl PartialEq for FinalitySignature { - fn eq(&self, other: &FinalitySignature) -> bool { - // Ensure we initialize self.is_verified field. - let is_verified = self.is_verified().is_ok(); - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - is_verified: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - } = self; - *block_hash == other.block_hash - && *era_id == other.era_id - && *signature == other.signature - && *public_key == other.public_key - && is_verified == other.is_verified().is_ok() - } -} - -impl Ord for FinalitySignature { - fn cmp(&self, other: &FinalitySignature) -> Ordering { - // Ensure we initialize self.is_verified field. - let is_verified = self.is_verified().is_ok(); - // Destructure to make sure we don't accidentally omit fields. 
- #[cfg(any(feature = "once_cell", test))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - is_verified: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let FinalitySignature { - block_hash, - era_id, - signature, - public_key, - } = self; - block_hash - .cmp(&other.block_hash) - .then_with(|| era_id.cmp(&other.era_id)) - .then_with(|| signature.cmp(&other.signature)) - .then_with(|| public_key.cmp(&other.public_key)) - .then_with(|| is_verified.cmp(&other.is_verified().is_ok())) - } -} - -impl PartialOrd for FinalitySignature { - fn partial_cmp(&self, other: &FinalitySignature) -> Option { - Some(self.cmp(other)) - } -} - -impl Display for FinalitySignature { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "finality signature for {}, from {}", - self.block_hash, self.public_key - ) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::TestBlockBuilder; - - #[test] - fn finality_signature() { - let rng = &mut TestRng::new(); - let block = TestBlockBuilder::new().build(rng); - // Signature should be over both block hash and era id. - let secret_key = SecretKey::random(rng); - let public_key = PublicKey::from(&secret_key); - let era_id = EraId::from(1); - let finality_signature = FinalitySignature::create(*block.hash(), era_id, &secret_key); - finality_signature.is_verified().unwrap(); - let signature = finality_signature.signature; - // Verify that signature includes era id. - let invalid_finality_signature = FinalitySignature { - block_hash: *block.hash(), - era_id: EraId::from(2), - signature, - public_key, - is_verified: OnceCell::new(), - }; - // Test should fail b/c `signature` is over `era_id=1` and here we're using `era_id=2`. - assert!(invalid_finality_signature.is_verified().is_err()); - } -} diff --git a/casper_types_ver_2_0/src/block/finality_signature_id.rs b/casper_types_ver_2_0/src/block/finality_signature_id.rs deleted file mode 100644 index 211071e2..00000000 --- a/casper_types_ver_2_0/src/block/finality_signature_id.rs +++ /dev/null @@ -1,55 +0,0 @@ -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use super::BlockHash; -#[cfg(doc)] -use super::FinalitySignature; -use crate::{EraId, PublicKey}; - -/// An identifier for a [`FinalitySignature`]. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct FinalitySignatureId { - block_hash: BlockHash, - era_id: EraId, - public_key: PublicKey, -} - -impl FinalitySignatureId { - /// Returns a new `FinalitySignatureId`. - pub fn new(block_hash: BlockHash, era_id: EraId, public_key: PublicKey) -> Self { - FinalitySignatureId { - block_hash, - era_id, - public_key, - } - } - - /// Returns the block hash of the associated block. - pub fn block_hash(&self) -> &BlockHash { - &self.block_hash - } - - /// Returns the era in which the associated block was created. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Returns the public key of the signing validator. 
- pub fn public_key(&self) -> &PublicKey { - &self.public_key - } -} - -impl Display for FinalitySignatureId { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "finality signature id for {}, from {}", - self.block_hash, self.public_key - ) - } -} diff --git a/casper_types_ver_2_0/src/block/json_compatibility.rs b/casper_types_ver_2_0/src/block/json_compatibility.rs deleted file mode 100644 index 1c256376..00000000 --- a/casper_types_ver_2_0/src/block/json_compatibility.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! This module provides types primarily to support converting instances of `BTreeMap` into -//! `Vec<(K, V)>` or similar, in order to allow these types to be able to be converted to and from -//! JSON, and to allow for the production of a static schema for them. - -#![cfg(all(feature = "std", feature = "json-schema"))] -mod json_block_with_signatures; - -pub use json_block_with_signatures::JsonBlockWithSignatures; diff --git a/casper_types_ver_2_0/src/block/json_compatibility/json_block_with_signatures.rs b/casper_types_ver_2_0/src/block/json_compatibility/json_block_with_signatures.rs deleted file mode 100644 index 71d472ea..00000000 --- a/casper_types_ver_2_0/src/block/json_compatibility/json_block_with_signatures.rs +++ /dev/null @@ -1,95 +0,0 @@ -use alloc::collections::BTreeMap; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use serde_map_to_array::{BTreeMapToArray, KeyValueJsonSchema, KeyValueLabels}; - -use crate::{crypto, Block, BlockSignatures, BlockV2, PublicKey, SecretKey, Signature}; - -#[cfg(feature = "json-schema")] -static JSON_SIGNED_BLOCK: Lazy = Lazy::new(|| { - let block = BlockV2::example().clone(); - let secret_key = SecretKey::example(); - let public_key = PublicKey::from(secret_key); - let signature = crypto::sign(block.hash.inner(), secret_key, &public_key); - let mut proofs = BTreeMap::new(); - proofs.insert(public_key, signature); - - JsonBlockWithSignatures { - block: block.into(), - proofs, - } -}); - -/// A JSON-friendly representation of a block and the signatures for that block. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, JsonSchema)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct JsonBlockWithSignatures { - /// The block. - pub block: Block, - /// The proofs of the block, i.e. a collection of validators' signatures of the block hash. - #[serde(with = "BTreeMapToArray::")] - pub proofs: BTreeMap, -} - -impl JsonBlockWithSignatures { - /// Constructs a new `JsonBlock`. - pub fn new(block: Block, maybe_signatures: Option) -> Self { - let proofs = maybe_signatures - .map(|signatures| signatures.proofs) - .unwrap_or_default(); - - JsonBlockWithSignatures { block, proofs } - } - - // This method is not intended to be used by third party crates. 
- #[doc(hidden)] - pub fn example() -> &'static Self { - &JSON_SIGNED_BLOCK - } -} -struct BlockProofLabels; - -impl KeyValueLabels for BlockProofLabels { - const KEY: &'static str = "public_key"; - const VALUE: &'static str = "signature"; -} - -impl KeyValueJsonSchema for BlockProofLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("BlockProof"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = Some( - "A validator's public key paired with a corresponding signature of a given block hash.", - ); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = Some("The validator's public key."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The validator's signature."); -} - -#[cfg(test)] -mod tests { - use crate::{testing::TestRng, TestBlockBuilder}; - - use super::*; - - #[test] - fn block_to_and_from_json_block_with_signatures() { - let rng = &mut TestRng::new(); - let block: Block = TestBlockBuilder::new().build(rng).into(); - let empty_signatures = BlockSignatures::new(*block.hash(), block.era_id()); - let json_block = JsonBlockWithSignatures::new(block.clone(), Some(empty_signatures)); - let recovered_block = Block::from(json_block); - assert_eq!(block, recovered_block); - } - - #[test] - fn json_block_roundtrip() { - let rng = &mut TestRng::new(); - let block: Block = TestBlockBuilder::new().build(rng).into(); - let json_string = serde_json::to_string_pretty(&block).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(block, decoded); - } -} diff --git a/casper_types_ver_2_0/src/block/rewarded_signatures.rs b/casper_types_ver_2_0/src/block/rewarded_signatures.rs deleted file mode 100644 index 082aae36..00000000 --- a/casper_types_ver_2_0/src/block/rewarded_signatures.rs +++ /dev/null @@ -1,474 +0,0 @@ -use alloc::{collections::BTreeSet, vec::Vec}; - -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - PublicKey, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; - -use serde::{Deserialize, Serialize}; -use tracing::error; - -/// Describes finality signatures that will be rewarded in a block. Consists of a vector of -/// `SingleBlockRewardedSignatures`, each of which describes signatures for a single ancestor -/// block. The first entry represents the signatures for the parent block, the second for the -/// parent of the parent, and so on. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct RewardedSignatures(Vec); - -/// List of identifiers for finality signatures for a particular past block. -/// -/// That past block height is current_height - signature_rewards_max_delay, the latter being defined -/// in the chainspec. -/// -/// We need to wait for a few blocks to pass (`signature_rewards_max_delay`) to store the finality -/// signers because we need a bit of time to get the block finality. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct SingleBlockRewardedSignatures(Vec); - -impl SingleBlockRewardedSignatures { - /// Creates a new set of recorded finality signaures from the era's validators + - /// the list of validators which signed. 
- pub fn from_validator_set<'a>( - public_keys: &BTreeSet, - all_validators: impl IntoIterator, - ) -> Self { - // Take the validators list - // Replace the ones who signed with 1 and the ones who didn't with 0 - // Pack everything into bytes - let result = Self::pack( - all_validators - .into_iter() - .map(|key| u8::from(public_keys.contains(key))), - ); - - let included_count: u32 = result.0.iter().map(|c| c.count_ones()).sum(); - if included_count as usize != public_keys.len() { - error!( - included_count, - expected_count = public_keys.len(), - "error creating past finality signatures from validator set" - ); - } - - result - } - - /// Gets the list of validators which signed from a set of recorded finality signaures (`self`) - /// + the era's validators. - pub fn to_validator_set( - &self, - all_validators: impl IntoIterator, - ) -> BTreeSet { - self.unpack() - .zip(all_validators) - .filter_map(|(active, validator)| (active != 0).then_some(validator)) - .collect() - } - - /// Packs the bits to bytes, to create a `PastFinalitySignature` - /// from an iterator of bits. - /// - /// If a value is neither 1 nor 0, it is interpreted as a 1. - #[doc(hidden)] - pub fn pack(bits: impl Iterator) -> Self { - //use itertools::Itertools; - - fn set_bit_at(value: u8, position: usize) -> u8 { - // Sanitize the value (must be 0 or 1): - let value = u8::from(value != 0); - - value << (7 - position) - } - - let inner = chunks_8(bits) - .map(|bits_chunk| { - bits_chunk - .enumerate() - .fold(0, |acc, (pos, value)| acc | set_bit_at(value, pos)) - }) - .collect(); - - SingleBlockRewardedSignatures(inner) - } - - /// Unpacks the bytes to bits, - /// to get a human readable representation of `PastFinalitySignature`. - #[doc(hidden)] - pub fn unpack(&self) -> impl Iterator + '_ { - // Returns the bit at the given position (0 or 1): - fn bit_at(byte: u8, position: u8) -> u8 { - (byte & (0b1000_0000 >> position)) >> (7 - position) - } - - self.0 - .iter() - .flat_map(|&byte| (0..8).map(move |i| bit_at(byte, i))) - } - - /// Calculates the set difference of two instances of `SingleBlockRewardedSignatures`. - #[doc(hidden)] - pub fn difference(mut self, other: &SingleBlockRewardedSignatures) -> Self { - for (self_byte, other_byte) in self.0.iter_mut().zip(other.0.iter()) { - *self_byte &= !other_byte; - } - self - } - - /// Calculates the set intersection of two instances of `SingleBlockRewardedSignatures`. - pub(crate) fn intersection(mut self, other: &SingleBlockRewardedSignatures) -> Self { - self.0 = self - .0 - .iter() - .zip(other.0.iter()) - .map(|(a, b)| *a & *b) - .collect(); - self - } - - /// Returns `true` if the set contains at least one signature. - pub(crate) fn has_some(&self) -> bool { - self.0.iter().any(|byte| *byte != 0) - } -} - -impl ToBytes for SingleBlockRewardedSignatures { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(Bytes::from(self.0.as_ref()).to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for SingleBlockRewardedSignatures { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (inner, rest) = Bytes::from_bytes(bytes)?; - Ok((SingleBlockRewardedSignatures(inner.into()), rest)) - } -} - -impl RewardedSignatures { - /// Creates a new instance of `RewardedSignatures`. 
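// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the deleted file): the
// `pack`/`unpack` helpers above record validator i (in era order) in byte
// i / 8, at bit i % 8 counted from the most significant bit. A minimal,
// std-only mirror of that scheme, using hypothetical names:
fn pack_bits(bits: &[u8]) -> Vec<u8> {
    let mut bytes = vec![0u8; (bits.len() + 7) / 8];
    for (i, &bit) in bits.iter().enumerate() {
        if bit != 0 {
            // set the (i % 8)-th bit, counting from the MSB
            bytes[i / 8] |= 0b1000_0000u8 >> (i % 8);
        }
    }
    bytes
}

fn unpack_bits(bytes: &[u8]) -> Vec<u8> {
    bytes
        .iter()
        .flat_map(|&byte| (0..8u8).map(move |i| (byte >> (7 - i)) & 1))
        .collect()
}

// Mirrors `from_and_to_methods_match_in_a_simple_case` further down in this
// diff: validators 2, 5, 6, 8 and 10 out of 11 signed.
fn _bit_packing_example() {
    let mut bits = vec![0u8; 11];
    for i in [2usize, 5, 6, 8, 10] {
        bits[i] = 1;
    }
    let packed = pack_bits(&bits);
    assert_eq!(packed, vec![0b0010_0110, 0b1010_0000]);
    assert_eq!(&unpack_bits(&packed)[..11], &bits[..]);
}
// ---------------------------------------------------------------------------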
- pub fn new>( - single_block_signatures: I, - ) -> Self { - Self(single_block_signatures.into_iter().collect()) - } - - /// Creates an instance of `RewardedSignatures` based on its unpacked (one byte per validator) - /// representation. - pub fn pack(unpacked: Vec>) -> Self { - Self( - unpacked - .into_iter() - .map(|single_block_signatures| { - SingleBlockRewardedSignatures::pack(single_block_signatures.into_iter()) - }) - .collect(), - ) - } - - /// Creates an unpacked (one byte per validator) representation of the finality signatures to - /// be rewarded in this block. - pub fn unpack(&self) -> Vec> { - self.0 - .iter() - .map(|single_block_signatures| single_block_signatures.unpack().collect()) - .collect() - } - - /// Returns this instance of `RewardedSignatures` with `num_blocks` of empty signatures - /// prepended. - pub fn left_padded(self, num_blocks: usize) -> Self { - Self( - core::iter::repeat_with(SingleBlockRewardedSignatures::default) - .take(num_blocks) - .chain(self.0) - .collect(), - ) - } - - /// Calculates the set difference between two instances of `RewardedSignatures`. - pub fn difference(self, other: &RewardedSignatures) -> Self { - Self( - self.0 - .into_iter() - .zip(other.0.iter()) - .map(|(single_block_signatures, other_block_signatures)| { - single_block_signatures.difference(other_block_signatures) - }) - .collect(), - ) - } - - /// Calculates the set intersection between two instances of `RewardedSignatures`. - pub fn intersection(&self, other: &RewardedSignatures) -> Self { - Self( - self.0 - .iter() - .zip(other.0.iter()) - .map(|(single_block_signatures, other_block_signatures)| { - single_block_signatures - .clone() - .intersection(other_block_signatures) - }) - .collect(), - ) - } - - /// Iterates over the `SingleBlockRewardedSignatures` for each rewarded block. - pub fn iter(&self) -> impl Iterator { - self.0.iter() - } - - /// Iterates over the `SingleBlockRewardedSignatures`, yielding the signatures together with - /// the block height for each entry. `block_height` is the height of the block that contains - /// this instance of `RewardedSignatures`. - pub fn iter_with_height( - &self, - block_height: u64, - ) -> impl Iterator { - self.0.iter().enumerate().map(move |(rel_height, sbrs)| { - ( - block_height - .saturating_sub(rel_height as u64) - .saturating_sub(1), - sbrs, - ) - }) - } - - /// Returns `true` if there is at least one cited signature. - pub fn has_some(&self) -> bool { - self.0.iter().any(|signatures| signatures.has_some()) - } -} - -pub(crate) static EMPTY: RewardedSignatures = RewardedSignatures(Vec::new()); - -impl ToBytes for RewardedSignatures { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for RewardedSignatures { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Vec::::from_bytes(bytes) - .map(|(inner, rest)| (RewardedSignatures(inner), rest)) - } -} - -/// Chunks an iterator over `u8`s into pieces of maximum size of 8. 
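// Editor's note (illustrative, not part of the deleted file): in
// `iter_with_height` above, entry 0 describes signatures for the parent block,
// entry 1 for the grandparent, and so on, hence the "height - index - 1"
// arithmetic. A one-line sketch of that mapping:
fn _cited_block_height(block_height: u64, entry_index: u64) -> u64 {
    // entry 0 -> parent (height - 1), entry 1 -> grandparent (height - 2), ...
    block_height.saturating_sub(entry_index).saturating_sub(1)
}
// e.g. for a block at height 100, entry 2 refers to the block at height 97.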
-fn chunks_8(bits: impl Iterator) -> impl Iterator> { - struct Chunks(B); - - struct Chunk { - values: [u8; 8], - index: usize, - max: usize, - } - - impl Iterator for Chunks - where - B: Iterator, - { - type Item = Chunk; - - fn next(&mut self) -> Option { - let mut values = [0; 8]; - let max = core::iter::zip(&mut values, &mut self.0) - .map(|(array_slot, value)| *array_slot = value) - .count(); - - (max != 0).then_some(Chunk { - values, - max, - index: 0, - }) - } - } - - impl Iterator for Chunk { - type Item = u8; - - fn next(&mut self) -> Option { - if self.index < self.max { - let n = self.values.get(self.index).cloned(); - self.index += 1; - n - } else { - None - } - } - } - - Chunks(bits) -} - -#[cfg(any(feature = "testing", test))] -impl SingleBlockRewardedSignatures { - /// Returns random data. - pub fn random(rng: &mut crate::testing::TestRng, n_validators: usize) -> Self { - let mut bytes = vec![0; (n_validators + 7) / 8]; - - rand::RngCore::fill_bytes(rng, bytes.as_mut()); - - SingleBlockRewardedSignatures(bytes) - } -} - -#[cfg(test)] -mod tests { - use super::{chunks_8, SingleBlockRewardedSignatures}; - use crate::{ - bytesrepr::{FromBytes, ToBytes}, - testing::TestRng, - PublicKey, - }; - use rand::{seq::IteratorRandom, Rng}; - use std::collections::BTreeSet; - - #[test] - fn empty_signatures() { - let rng = &mut TestRng::new(); - let validators: Vec<_> = std::iter::repeat_with(|| PublicKey::random(rng)) - .take(7) - .collect(); - let original_signed = BTreeSet::new(); - - let past_finality_signatures = - SingleBlockRewardedSignatures::from_validator_set(&original_signed, validators.iter()); - - assert_eq!(past_finality_signatures.0, &[0]); - - let signed = past_finality_signatures.to_validator_set(validators); - - assert_eq!(original_signed, signed); - } - - #[test] - fn from_and_to_methods_match_in_a_simple_case() { - let rng = &mut TestRng::new(); - let validators: Vec<_> = std::iter::repeat_with(|| PublicKey::random(rng)) - .take(11) - .collect(); - let signed = { - let mut signed = BTreeSet::new(); - signed.insert(validators[2].clone()); - signed.insert(validators[5].clone()); - signed.insert(validators[6].clone()); - signed.insert(validators[8].clone()); - signed.insert(validators[10].clone()); - signed - }; - - let past_finality_signatures = - SingleBlockRewardedSignatures::from_validator_set(&signed, validators.iter()); - - assert_eq!(past_finality_signatures.0, &[0b0010_0110, 0b1010_0000]); - - let signed_ = past_finality_signatures.to_validator_set(validators); - - assert_eq!(signed, signed_); - } - - #[test] - fn simple_serialization_roundtrip() { - let data = SingleBlockRewardedSignatures(vec![1, 2, 3, 4, 5]); - - let serialized = data.to_bytes().unwrap(); - assert_eq!(serialized.len(), data.0.len() + 4); - assert_eq!(data.serialized_length(), data.0.len() + 4); - - let (deserialized, rest) = SingleBlockRewardedSignatures::from_bytes(&serialized).unwrap(); - - assert_eq!(data, deserialized); - assert_eq!(rest, &[0u8; 0]); - } - - #[test] - fn serialization_roundtrip_of_empty_data() { - let data = SingleBlockRewardedSignatures::default(); - - let serialized = data.to_bytes().unwrap(); - assert_eq!(serialized, &[0; 4]); - assert_eq!(data.serialized_length(), 4); - - let (deserialized, rest) = SingleBlockRewardedSignatures::from_bytes(&serialized).unwrap(); - - assert_eq!(data, deserialized); - assert_eq!(rest, &[0u8; 0]); - } - - #[test] - fn serialization_roundtrip_of_random_data() { - let rng = &mut TestRng::new(); - let n_validators = rng.gen_range(50..200); - 
let all_validators: BTreeSet<_> = std::iter::repeat_with(|| PublicKey::random(rng)) - .take(n_validators) - .collect(); - let n_to_sign = rng.gen_range(0..all_validators.len()); - let public_keys = all_validators - .iter() - .cloned() - .choose_multiple(rng, n_to_sign) - .into_iter() - .collect(); - - let past_finality_signatures = - SingleBlockRewardedSignatures::from_validator_set(&public_keys, all_validators.iter()); - - let serialized = past_finality_signatures.to_bytes().unwrap(); - let (deserialized, rest) = SingleBlockRewardedSignatures::from_bytes(&serialized).unwrap(); - - assert_eq!(public_keys, deserialized.to_validator_set(all_validators)); - assert_eq!(rest, &[0u8; 0]); - } - - #[test] - fn chunk_iterator() { - fn v(maybe_chunk: Option>) -> Option> { - maybe_chunk.map(itertools::Itertools::collect_vec) - } - - // Empty chunks: - - let mut chunks = chunks_8(IntoIterator::into_iter([])); - - assert_eq!(v(chunks.next()), None); - - // Exact size chunk: - - let mut chunks = chunks_8(IntoIterator::into_iter([10, 11, 12, 13, 14, 15, 16, 17])); - - assert_eq!(v(chunks.next()), Some(vec![10, 11, 12, 13, 14, 15, 16, 17])); - assert_eq!(v(chunks.next()), None); - - // Chunks with a remainder: - - let mut chunks = chunks_8(IntoIterator::into_iter([ - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - ])); - - assert_eq!(v(chunks.next()), Some(vec![10, 11, 12, 13, 14, 15, 16, 17])); - assert_eq!(v(chunks.next()), Some(vec![18, 19, 20, 21, 22, 23, 24, 25])); - assert_eq!(v(chunks.next()), Some(vec![26])); - } -} diff --git a/casper_types_ver_2_0/src/block/rewards.rs b/casper_types_ver_2_0/src/block/rewards.rs deleted file mode 100644 index 66f5aff0..00000000 --- a/casper_types_ver_2_0/src/block/rewards.rs +++ /dev/null @@ -1,11 +0,0 @@ -use alloc::collections::BTreeMap; - -use crate::{PublicKey, U512}; - -/// Rewards distributed to validators. -pub enum Rewards<'a> { - /// Rewards for version 1, associate a ratio to each validator. - V1(&'a BTreeMap), - /// Rewards for version 1, associate a tokens amount to each validator. - V2(&'a BTreeMap), -} diff --git a/casper_types_ver_2_0/src/block/signed_block.rs b/casper_types_ver_2_0/src/block/signed_block.rs deleted file mode 100644 index a5d49d64..00000000 --- a/casper_types_ver_2_0/src/block/signed_block.rs +++ /dev/null @@ -1,80 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Block, BlockSignatures, -}; -#[cfg(any(feature = "std", feature = "json-schema", test))] -use serde::{Deserialize, Serialize}; - -/// A block and signatures for that block. -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr( - any(feature = "std", feature = "json-schema", test), - derive(Serialize, Deserialize) -)] -pub struct SignedBlock { - /// Block. - pub(crate) block: Block, - // The signatures of the block. - pub(crate) block_signatures: BlockSignatures, -} - -impl SignedBlock { - /// Creates a new `SignedBlock`. - pub fn new(block: Block, block_signatures: BlockSignatures) -> Self { - Self { - block, - block_signatures, - } - } - - /// Returns the inner block. - pub fn block(&self) -> &Block { - &self.block - } - - /// Converts `self` into the block and signatures. 
- pub fn into_inner(self) -> (Block, BlockSignatures) { - (self.block, self.block_signatures) - } -} - -impl FromBytes for SignedBlock { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (block, bytes) = FromBytes::from_bytes(bytes)?; - let (block_signatures, bytes) = FromBytes::from_bytes(bytes)?; - Ok((SignedBlock::new(block, block_signatures), bytes)) - } -} - -impl ToBytes for SignedBlock { - fn to_bytes(&self) -> Result, crate::bytesrepr::Error> { - let mut buf = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buf)?; - Ok(buf) - } - - fn write_bytes(&self, bytes: &mut Vec) -> Result<(), crate::bytesrepr::Error> { - self.block.write_bytes(bytes)?; - self.block_signatures.write_bytes(bytes)?; - Ok(()) - } - - fn serialized_length(&self) -> usize { - self.block.serialized_length() + self.block_signatures.serialized_length() - } -} - -impl Display for SignedBlock { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!( - f, - "block #{}, {}, with {} block signatures", - self.block.height(), - self.block.hash(), - self.block_signatures.len() - ) - } -} diff --git a/casper_types_ver_2_0/src/block/signed_block_header.rs b/casper_types_ver_2_0/src/block/signed_block_header.rs deleted file mode 100644 index a478314d..00000000 --- a/casper_types_ver_2_0/src/block/signed_block_header.rs +++ /dev/null @@ -1,143 +0,0 @@ -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use super::{BlockHash, BlockHeader, BlockSignatures}; -use crate::EraId; -#[cfg(any(feature = "testing", test))] -use crate::Signature; - -/// An error which can result from validating a [`SignedBlockHeader`]. -#[derive(Copy, Clone, Eq, PartialEq, Debug)] -#[non_exhaustive] -pub enum SignedBlockHeaderValidationError { - /// Mismatch between block hash in [`BlockHeader`] and [`BlockSignatures`]. - BlockHashMismatch { - /// The block hash in the `BlockHeader`. - block_hash_in_header: BlockHash, - /// The block hash in the `BlockSignatures`. - block_hash_in_signatures: BlockHash, - }, - /// Mismatch between era ID in [`BlockHeader`] and [`BlockSignatures`]. - EraIdMismatch { - /// The era ID in the `BlockHeader`. - era_id_in_header: EraId, - /// The era ID in the `BlockSignatures`. - era_id_in_signatures: EraId, - }, -} - -impl Display for SignedBlockHeaderValidationError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - SignedBlockHeaderValidationError::BlockHashMismatch { - block_hash_in_header: expected, - block_hash_in_signatures: actual, - } => { - write!( - formatter, - "block hash mismatch - header: {}, signatures: {}", - expected, actual - ) - } - SignedBlockHeaderValidationError::EraIdMismatch { - era_id_in_header: expected, - era_id_in_signatures: actual, - } => { - write!( - formatter, - "era id mismatch - header: {}, signatures: {}", - expected, actual - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for SignedBlockHeaderValidationError {} - -/// A block header and collection of signatures of a given block. -#[derive(Clone, Eq, PartialEq, Debug)] -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct SignedBlockHeader { - block_header: BlockHeader, - block_signatures: BlockSignatures, -} - -impl SignedBlockHeader { - /// Returns a new `SignedBlockHeader`. 
- pub fn new(block_header: BlockHeader, block_signatures: BlockSignatures) -> Self { - SignedBlockHeader { - block_header, - block_signatures, - } - } - - /// Returns the block header. - pub fn block_header(&self) -> &BlockHeader { - &self.block_header - } - - /// Returns the block signatures. - pub fn block_signatures(&self) -> &BlockSignatures { - &self.block_signatures - } - - /// Returns `Ok` if and only if the block hash and era ID in the `BlockHeader` are identical to - /// those in the `BlockSignatures`. - /// - /// Note that no cryptographic verification of the contained signatures is performed. For this, - /// see [`BlockSignatures::is_verified`]. - pub fn is_valid(&self) -> Result<(), SignedBlockHeaderValidationError> { - if self.block_header.block_hash() != *self.block_signatures.block_hash() { - return Err(SignedBlockHeaderValidationError::BlockHashMismatch { - block_hash_in_header: self.block_header.block_hash(), - block_hash_in_signatures: *self.block_signatures.block_hash(), - }); - } - if self.block_header.era_id() != self.block_signatures.era_id() { - return Err(SignedBlockHeaderValidationError::EraIdMismatch { - era_id_in_header: self.block_header.era_id(), - era_id_in_signatures: self.block_signatures.era_id(), - }); - } - Ok(()) - } - - /// Sets the era ID contained in `block_signatures` to its max value, rendering it and hence - /// `self` invalid (assuming the relevant era ID for this `SignedBlockHeader` wasn't already - /// the max value). - #[cfg(any(feature = "testing", test))] - pub fn invalidate_era(&mut self) { - self.block_signatures.era_id = EraId::new(u64::MAX); - } - - /// Replaces the signature field of the last `block_signatures` entry with the `System` variant - /// of [`Signature`], rendering that entry invalid. - /// - /// Note that [`Self::is_valid`] will be unaffected by this as it only checks for equality in - /// the block hash and era ID of the header and signatures; no cryptographic verification is - /// performed. - #[cfg(any(feature = "testing", test))] - pub fn invalidate_last_signature(&mut self) { - let last_proof = self - .block_signatures - .proofs - .last_entry() - .expect("should have at least one signature"); - *last_proof.into_mut() = Signature::System; - } -} - -impl Display for SignedBlockHeader { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}, and {}", self.block_header, self.block_signatures) - } -} diff --git a/casper_types_ver_2_0/src/block/test_block_builder/test_block_v1_builder.rs b/casper_types_ver_2_0/src/block/test_block_builder/test_block_v1_builder.rs deleted file mode 100644 index 1a6b68a7..00000000 --- a/casper_types_ver_2_0/src/block/test_block_builder/test_block_v1_builder.rs +++ /dev/null @@ -1,183 +0,0 @@ -use std::iter; - -use rand::Rng; - -use crate::{testing::TestRng, Block, EraEndV1}; - -use crate::{ - system::auction::ValidatorWeights, BlockHash, BlockV1, Deploy, Digest, EraId, EraReport, - ProtocolVersion, PublicKey, Timestamp, U512, -}; - -/// A helper to build the blocks with various properties required for tests. 
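// Editor's note (illustrative usage sketch, not part of the deleted file;
// the import path is an assumption): `SignedBlockHeader::is_valid` above only
// checks that the block hash and era ID agree between the header and the
// signatures; it performs no cryptographic verification of the signatures.
//
// use casper_types_ver_2_0::{BlockHeader, BlockSignatures, SignedBlockHeader};
fn _check_header_signature_consistency(header: BlockHeader, signatures: BlockSignatures) {
    let signed = SignedBlockHeader::new(header, signatures);
    match signed.is_valid() {
        Ok(()) => { /* hash and era ID agree; verify signatures separately */ }
        Err(err) => eprintln!("inconsistent signed block header: {}", err),
    }
}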
-pub struct TestBlockV1Builder { - parent_hash: Option, - state_root_hash: Option, - timestamp: Option, - era: Option, - height: Option, - protocol_version: ProtocolVersion, - deploys: Vec, - is_switch: Option, - validator_weights: Option, -} - -impl Default for TestBlockV1Builder { - fn default() -> Self { - Self { - parent_hash: None, - state_root_hash: None, - timestamp: None, - era: None, - height: None, - protocol_version: ProtocolVersion::V1_0_0, - deploys: Vec::new(), - is_switch: None, - validator_weights: None, - } - } -} - -impl TestBlockV1Builder { - /// Creates new `TestBlockBuilder`. - pub fn new() -> Self { - Self::default() - } - - /// Sets the parent hash for the block. - pub fn parent_hash(self, parent_hash: BlockHash) -> Self { - Self { - parent_hash: Some(parent_hash), - ..self - } - } - - /// Sets the state root hash for the block. - pub fn state_root_hash(self, state_root_hash: Digest) -> Self { - Self { - state_root_hash: Some(state_root_hash), - ..self - } - } - - /// Sets the timestamp for the block. - pub fn timestamp(self, timestamp: Timestamp) -> Self { - Self { - timestamp: Some(timestamp), - ..self - } - } - - /// Sets the era for the block - pub fn era(self, era: impl Into) -> Self { - Self { - era: Some(era.into()), - ..self - } - } - - /// Sets the height for the block. - pub fn height(self, height: u64) -> Self { - Self { - height: Some(height), - ..self - } - } - - /// Sets the protocol version for the block. - pub fn protocol_version(self, protocol_version: ProtocolVersion) -> Self { - Self { - protocol_version, - ..self - } - } - - /// Associates the given deploys with the created block. - pub fn deploys<'a, I: IntoIterator>(self, deploys_iter: I) -> Self { - Self { - deploys: deploys_iter.into_iter().cloned().collect(), - ..self - } - } - - /// Associates a number of random deploys with the created block. - pub fn random_deploys(mut self, count: usize, rng: &mut TestRng) -> Self { - self.deploys = iter::repeat(()) - .take(count) - .map(|_| Deploy::random(rng)) - .collect(); - self - } - - /// Allows setting the created block to be switch block or not. - pub fn switch_block(self, is_switch: bool) -> Self { - Self { - is_switch: Some(is_switch), - ..self - } - } - - /// Sets the validator weights for the block. - pub fn validator_weights(self, validator_weights: ValidatorWeights) -> Self { - Self { - validator_weights: Some(validator_weights), - ..self - } - } - - /// Builds the block. 
- pub fn build(self, rng: &mut TestRng) -> BlockV1 { - let Self { - parent_hash, - state_root_hash, - timestamp, - era, - height, - protocol_version, - deploys, - is_switch, - validator_weights, - } = self; - - let parent_hash = parent_hash.unwrap_or_else(|| BlockHash::new(rng.gen())); - let parent_seed = Digest::random(rng); - let state_root_hash = state_root_hash.unwrap_or_else(|| rng.gen()); - let random_bit = rng.gen(); - let is_switch = is_switch.unwrap_or_else(|| rng.gen_bool(0.1)); - let era_end = is_switch.then(|| { - let next_era_validator_weights = validator_weights.unwrap_or_else(|| { - (1..6) - .map(|i| (PublicKey::random(rng), U512::from(i))) - .take(6) - .collect() - }); - EraEndV1::new(EraReport::random(rng), next_era_validator_weights) - }); - let timestamp = timestamp.unwrap_or_else(Timestamp::now); - let era_id = era.unwrap_or(EraId::random(rng)); - let height = height.unwrap_or_else(|| era_id.value() * 10 + rng.gen_range(0..10)); - let proposer = PublicKey::random(rng); - let deploy_hashes = deploys.iter().map(|deploy| *deploy.hash()).collect(); - let transfer_hashes = vec![]; - - BlockV1::new( - parent_hash, - parent_seed, - state_root_hash, - random_bit, - era_end, - timestamp, - era_id, - height, - protocol_version, - proposer, - deploy_hashes, - transfer_hashes, - ) - } - - /// Builds the block as a versioned block. - pub fn build_versioned(self, rng: &mut TestRng) -> Block { - self.build(rng).into() - } -} diff --git a/casper_types_ver_2_0/src/block/test_block_builder/test_block_v2_builder.rs b/casper_types_ver_2_0/src/block/test_block_builder/test_block_v2_builder.rs deleted file mode 100644 index b6a8324f..00000000 --- a/casper_types_ver_2_0/src/block/test_block_builder/test_block_v2_builder.rs +++ /dev/null @@ -1,275 +0,0 @@ -use std::iter; - -use alloc::collections::BTreeMap; -use rand::Rng; - -use crate::{ - system::auction::ValidatorWeights, testing::TestRng, Block, BlockHash, BlockV2, Digest, - EraEndV2, EraId, ProtocolVersion, PublicKey, RewardedSignatures, Timestamp, Transaction, - TransactionEntryPoint, TransactionSessionKind, TransactionTarget, U512, -}; - -/// A helper to build the blocks with various properties required for tests. -pub struct TestBlockV2Builder { - parent_hash: Option, - state_root_hash: Option, - timestamp: Option, - era: Option, - height: Option, - proposer: Option, - protocol_version: ProtocolVersion, - txns: Vec, - is_switch: Option, - validator_weights: Option, - rewarded_signatures: Option, -} - -impl Default for TestBlockV2Builder { - fn default() -> Self { - Self { - parent_hash: None, - state_root_hash: None, - timestamp: None, - era: None, - height: None, - proposer: None, - protocol_version: ProtocolVersion::V1_0_0, - txns: Vec::new(), - is_switch: None, - validator_weights: None, - rewarded_signatures: None, - } - } -} - -impl TestBlockV2Builder { - /// Creates new `TestBlockBuilder`. - pub fn new() -> Self { - Self::default() - } - - /// Sets the parent hash for the block. - pub fn parent_hash(self, parent_hash: BlockHash) -> Self { - Self { - parent_hash: Some(parent_hash), - ..self - } - } - - /// Sets the state root hash for the block. - pub fn state_root_hash(self, state_root_hash: Digest) -> Self { - Self { - state_root_hash: Some(state_root_hash), - ..self - } - } - - /// Sets the timestamp for the block. 
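// Editor's note (illustrative usage sketch, not part of the deleted file; the
// import paths are assumptions and the `testing` feature is required): the V1
// builder deleted above fills every unset field with random-but-plausible
// values, so a test only has to pin down the fields it actually cares about.
//
// use casper_types_ver_2_0::{testing::TestRng, BlockV1, EraId, TestBlockV1Builder};
fn _example_v1_switch_block(rng: &mut TestRng) -> BlockV1 {
    TestBlockV1Builder::new()
        .era(EraId::new(3))   // pin the era ...
        .height(31)           // ... and the height
        .switch_block(true)   // forces `build` to generate an `EraEndV1`
        .build(rng)
}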
- pub fn timestamp(self, timestamp: Timestamp) -> Self { - Self { - timestamp: Some(timestamp), - ..self - } - } - - /// Sets the era for the block - pub fn era(self, era: impl Into) -> Self { - Self { - era: Some(era.into()), - ..self - } - } - - /// Sets the height for the block. - pub fn height(self, height: u64) -> Self { - Self { - height: Some(height), - ..self - } - } - - /// Sets the block proposer. - pub fn proposer(self, proposer: PublicKey) -> Self { - Self { - proposer: Some(proposer), - ..self - } - } - - /// Sets the protocol version for the block. - pub fn protocol_version(self, protocol_version: ProtocolVersion) -> Self { - Self { - protocol_version, - ..self - } - } - - /// Associates the given transactions with the created block. - pub fn transactions<'a, I: IntoIterator>(self, txns_iter: I) -> Self { - Self { - txns: txns_iter.into_iter().cloned().collect(), - ..self - } - } - - /// Sets the height for the block. - pub fn rewarded_signatures(self, rewarded_signatures: RewardedSignatures) -> Self { - Self { - rewarded_signatures: Some(rewarded_signatures), - ..self - } - } - - /// Associates a number of random transactions with the created block. - pub fn random_transactions(mut self, count: usize, rng: &mut TestRng) -> Self { - self.txns = iter::repeat_with(|| Transaction::random(rng)) - .take(count) - .collect(); - self - } - - /// Allows setting the created block to be switch block or not. - pub fn switch_block(self, is_switch: bool) -> Self { - Self { - is_switch: Some(is_switch), - ..self - } - } - - /// Sets the validator weights for the block. - pub fn validator_weights(self, validator_weights: ValidatorWeights) -> Self { - Self { - validator_weights: Some(validator_weights), - ..self - } - } - - /// Builds the block. - pub fn build(self, rng: &mut TestRng) -> BlockV2 { - let Self { - parent_hash, - state_root_hash, - timestamp, - era, - height, - proposer, - protocol_version, - txns, - is_switch, - validator_weights, - rewarded_signatures, - } = self; - - let parent_hash = parent_hash.unwrap_or_else(|| BlockHash::new(rng.gen())); - let parent_seed = Digest::random(rng); - let state_root_hash = state_root_hash.unwrap_or_else(|| rng.gen()); - let random_bit = rng.gen(); - let is_switch = is_switch.unwrap_or_else(|| rng.gen_bool(0.1)); - let era_end = is_switch.then(|| gen_era_end_v2(rng, validator_weights)); - let timestamp = timestamp.unwrap_or_else(Timestamp::now); - let era_id = era.unwrap_or(EraId::random(rng)); - let height = height.unwrap_or_else(|| era_id.value() * 10 + rng.gen_range(0..10)); - let proposer = proposer.unwrap_or_else(|| PublicKey::random(rng)); - - let mut transfer_hashes = vec![]; - let mut staking_hashes = vec![]; - let mut install_upgrade_hashes = vec![]; - let mut standard_hashes = vec![]; - for txn in txns { - let txn_hash = txn.hash(); - match txn { - Transaction::Deploy(deploy) => { - if deploy.session().is_transfer() { - transfer_hashes.push(txn_hash); - } else { - standard_hashes.push(txn_hash); - } - } - Transaction::V1(v1_txn) => match v1_txn.target() { - TransactionTarget::Native => match v1_txn.entry_point() { - TransactionEntryPoint::Transfer => transfer_hashes.push(txn_hash), - TransactionEntryPoint::Custom(_) - | TransactionEntryPoint::AddBid - | TransactionEntryPoint::WithdrawBid - | TransactionEntryPoint::Delegate - | TransactionEntryPoint::Undelegate - | TransactionEntryPoint::Redelegate => staking_hashes.push(txn_hash), - }, - TransactionTarget::Stored { .. 
} => standard_hashes.push(txn_hash), - TransactionTarget::Session { kind, .. } => match kind { - TransactionSessionKind::Standard | TransactionSessionKind::Isolated => { - standard_hashes.push(txn_hash) - } - TransactionSessionKind::Installer | TransactionSessionKind::Upgrader => { - install_upgrade_hashes.push(txn_hash) - } - }, - }, - } - } - let rewarded_signatures = rewarded_signatures.unwrap_or_default(); - - BlockV2::new( - parent_hash, - parent_seed, - state_root_hash, - random_bit, - era_end, - timestamp, - era_id, - height, - protocol_version, - proposer, - transfer_hashes, - staking_hashes, - install_upgrade_hashes, - standard_hashes, - rewarded_signatures, - ) - } - - /// Builds the block as a versioned block. - pub fn build_versioned(self, rng: &mut TestRng) -> Block { - self.build(rng).into() - } - - /// Builds a block that is invalid. - pub fn build_invalid(self, rng: &mut TestRng) -> BlockV2 { - self.build(rng).make_invalid(rng) - } -} - -fn gen_era_end_v2( - rng: &mut TestRng, - validator_weights: Option>, -) -> EraEndV2 { - let equivocators_count = rng.gen_range(0..5); - let rewards_count = rng.gen_range(0..5); - let inactive_count = rng.gen_range(0..5); - let next_era_validator_weights = validator_weights.unwrap_or_else(|| { - (1..6) - .map(|i| (PublicKey::random(rng), U512::from(i))) - .take(6) - .collect() - }); - let equivocators = iter::repeat_with(|| PublicKey::random(rng)) - .take(equivocators_count) - .collect(); - let rewards = iter::repeat_with(|| { - let pub_key = PublicKey::random(rng); - let reward = rng.gen_range(1..=1_000_000_000 + 1); - (pub_key, U512::from(reward)) - }) - .take(rewards_count) - .collect(); - let inactive_validators = iter::repeat_with(|| PublicKey::random(rng)) - .take(inactive_count) - .collect(); - - EraEndV2::new( - equivocators, - inactive_validators, - next_era_validator_weights, - rewards, - ) -} diff --git a/casper_types_ver_2_0/src/block_time.rs b/casper_types_ver_2_0/src/block_time.rs deleted file mode 100644 index f278a36b..00000000 --- a/casper_types_ver_2_0/src/block_time.rs +++ /dev/null @@ -1,55 +0,0 @@ -use alloc::vec::Vec; - -use crate::bytesrepr::{Error, FromBytes, ToBytes, U64_SERIALIZED_LENGTH}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -/// The number of bytes in a serialized [`BlockTime`]. -pub const BLOCKTIME_SERIALIZED_LENGTH: usize = U64_SERIALIZED_LENGTH; - -/// A newtype wrapping a [`u64`] which represents the block time. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[derive(Clone, Copy, Default, Debug, PartialEq, Eq, PartialOrd, Serialize, Deserialize)] -pub struct BlockTime(u64); - -impl BlockTime { - /// Constructs a `BlockTime`. - pub fn new(value: u64) -> Self { - BlockTime(value) - } - - /// Saturating integer subtraction. Computes `self - other`, saturating at `0` instead of - /// overflowing. 
- #[must_use] - pub fn saturating_sub(self, other: BlockTime) -> Self { - BlockTime(self.0.saturating_sub(other.0)) - } -} - -impl From for u64 { - fn from(blocktime: BlockTime) -> Self { - blocktime.0 - } -} - -impl ToBytes for BlockTime { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - BLOCKTIME_SERIALIZED_LENGTH - } -} - -impl FromBytes for BlockTime { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (time, rem) = FromBytes::from_bytes(bytes)?; - Ok((BlockTime::new(time), rem)) - } -} diff --git a/casper_types_ver_2_0/src/byte_code.rs b/casper_types_ver_2_0/src/byte_code.rs deleted file mode 100644 index 1e7605d0..00000000 --- a/casper_types_ver_2_0/src/byte_code.rs +++ /dev/null @@ -1,467 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - addressable_entity, bytesrepr, - bytesrepr::{Bytes, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - checksummed_hex, - key::ByteCodeAddr, - uref, CLType, CLTyped, -}; - -const BYTE_CODE_MAX_DISPLAY_LEN: usize = 16; -const KEY_HASH_LENGTH: usize = 32; -const WASM_STRING_PREFIX: &str = "contract-wasm-"; - -/// Associated error type of `TryFrom<&[u8]>` for `ByteCodeHash`. -#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - InvalidPrefix, - Hex(base16::DecodeError), - Hash(TryFromSliceError), - AccountHash(addressable_entity::FromAccountHashStrError), - URef(uref::FromStrError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From for FromStrError { - fn from(error: addressable_entity::FromAccountHashStrError) -> Self { - FromStrError::AccountHash(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::AccountHash(error) => { - write!(f, "account hash from string error: {:?}", error) - } - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - } - } -} - -/// A newtype wrapping a `HashAddr` which is the raw bytes of -/// the ByteCodeHash -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ByteCodeHash(ByteCodeAddr); - -impl ByteCodeHash { - /// Constructs a new `ByteCodeHash` from the raw bytes of the contract wasm hash. - pub const fn new(value: ByteCodeAddr) -> ByteCodeHash { - ByteCodeHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. 
- pub fn value(&self) -> ByteCodeAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ByteCodeHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", WASM_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ByteCodeHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(WASM_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = ByteCodeAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(ByteCodeHash(bytes)) - } -} - -impl Display for ByteCodeHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ByteCodeHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ByteCodeHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ByteCodeHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ByteCodeHash { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for ByteCodeHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ByteCodeHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ByteCodeHash { - fn from(bytes: [u8; 32]) -> Self { - ByteCodeHash(bytes) - } -} - -impl Serialize for ByteCodeHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ByteCodeHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ByteCodeHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = ByteCodeAddr::deserialize(deserializer)?; - Ok(ByteCodeHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ByteCodeHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ByteCodeHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - ByteCodeAddr::try_from(bytes) - .map(ByteCodeHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ByteCodeHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - ByteCodeAddr::try_from(bytes as &[u8]) - .map(ByteCodeHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ByteCodeHash { - fn schema_name() -> String { - String::from("ByteCodeHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = - Some("The hash address of the contract wasm".to_string()); - schema_object.into() - } -} - -/// The type of Byte code. 
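// Editor's note (std-only illustrative sketch, not part of the deleted file):
// `to_formatted_string` above is simply the fixed "contract-wasm-" prefix
// followed by the lowercase hex of the 32-byte address, and
// `from_formatted_str` strips the prefix and decodes the hex again.
fn _format_byte_code_hash(addr: [u8; 32]) -> String {
    let hex: String = addr.iter().map(|byte| format!("{:02x}", byte)).collect();
    format!("contract-wasm-{}", hex)
}
// e.g. `[3u8; 32]` formats as "contract-wasm-0303..." (64 hex characters),
// which is what the `contract_wasm_hash_from_str` test below round-trips.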
-#[repr(u8)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[derive(PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Hash, Serialize, Deserialize)] -pub enum ByteCodeKind { - /// Empty byte code. - Empty = 0, - /// Byte code to be executed with the version 1 Casper execution engine. - V1CasperWasm = 1, -} - -impl ToBytes for ByteCodeKind { - fn to_bytes(&self) -> Result, Error> { - (*self as u8).to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - (*self as u8).write_bytes(writer) - } -} - -impl FromBytes for ByteCodeKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (byte_code_kind, remainder) = u8::from_bytes(bytes)?; - match byte_code_kind { - byte_code_kind if byte_code_kind == ByteCodeKind::Empty as u8 => { - Ok((ByteCodeKind::Empty, remainder)) - } - byte_code_kind if byte_code_kind == ByteCodeKind::V1CasperWasm as u8 => { - Ok((ByteCodeKind::V1CasperWasm, remainder)) - } - _ => Err(Error::Formatting), - } - } -} - -impl Display for ByteCodeKind { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - ByteCodeKind::Empty => { - write!(f, "empty") - } - ByteCodeKind::V1CasperWasm => { - write!(f, "v1-casper-wasm") - } - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ByteCodeKind { - match rng.gen_range(0..=1) { - 0 => ByteCodeKind::Empty, - 1 => ByteCodeKind::V1CasperWasm, - _ => unreachable!(), - } - } -} - -/// A container for contract's Wasm bytes. -#[derive(PartialEq, Eq, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct ByteCode { - kind: ByteCodeKind, - bytes: Bytes, -} - -impl Debug for ByteCode { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if self.bytes.len() > BYTE_CODE_MAX_DISPLAY_LEN { - write!( - f, - "ByteCode(0x{}...)", - base16::encode_lower(&self.bytes[..BYTE_CODE_MAX_DISPLAY_LEN]) - ) - } else { - write!(f, "ByteCode(0x{})", base16::encode_lower(&self.bytes)) - } - } -} - -impl ByteCode { - /// Creates new Wasm object from bytes. - pub fn new(kind: ByteCodeKind, bytes: Vec) -> Self { - ByteCode { - kind, - bytes: bytes.into(), - } - } - - /// Consumes instance of [`ByteCode`] and returns its bytes. - pub fn take_bytes(self) -> Vec { - self.bytes.into() - } - - /// Returns a slice of contained Wasm bytes. - pub fn bytes(&self) -> &[u8] { - self.bytes.as_ref() - } - - /// Return the type of byte code. 
- pub fn kind(&self) -> ByteCodeKind { - self.kind - } -} - -impl ToBytes for ByteCode { - fn to_bytes(&self) -> Result, Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.kind.serialized_length() + self.bytes.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.kind.write_bytes(writer)?; - self.bytes.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ByteCode { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (kind, remainder) = ByteCodeKind::from_bytes(bytes)?; - let (bytes, remainder) = Bytes::from_bytes(remainder)?; - Ok((ByteCode { kind, bytes }, remainder)) - } -} - -#[cfg(test)] -mod tests { - use rand::RngCore; - - use super::*; - use crate::testing::TestRng; - - #[test] - fn debug_repr_of_short_wasm() { - const SIZE: usize = 8; - let wasm_bytes = vec![0; SIZE]; - let byte_code = ByteCode::new(ByteCodeKind::V1CasperWasm, wasm_bytes); - assert_eq!(format!("{:?}", byte_code), "ByteCode(0x0000000000000000)"); - } - - #[test] - fn debug_repr_of_long_wasm() { - const SIZE: usize = 65; - let wasm_bytes = vec![0; SIZE]; - let byte_code = ByteCode::new(ByteCodeKind::V1CasperWasm, wasm_bytes); - // String output is less than the bytes itself - assert_eq!( - format!("{:?}", byte_code), - "ByteCode(0x00000000000000000000000000000000...)" - ); - } - - #[test] - fn byte_code_bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let byte_code = ByteCode::new(rng.gen(), vec![]); - bytesrepr::test_serialization_roundtrip(&byte_code); - - let mut buffer = vec![0u8; rng.gen_range(1..100)]; - rng.fill_bytes(buffer.as_mut()); - let byte_code = ByteCode::new(rng.gen(), buffer); - bytesrepr::test_serialization_roundtrip(&byte_code); - } - - #[test] - fn contract_wasm_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let byte_code_hash = - ByteCodeAddr::try_from(&bytes[..]).expect("should create byte code hash"); - let contract_hash = ByteCodeHash::new(byte_code_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_wasm_hash_from_str() { - let byte_code_hash = ByteCodeHash([3; 32]); - let encoded = byte_code_hash.to_formatted_string(); - let decoded = ByteCodeHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(byte_code_hash, decoded); - - let invalid_prefix = - "contractwasm-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(ByteCodeHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "contract-wasm-00000000000000000000000000000000000000000000000000000000000000"; - assert!(ByteCodeHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(ByteCodeHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(ByteCodeHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn contract_wasm_hash_bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let byte_code_hash = ByteCodeHash(rng.gen()); - bytesrepr::test_serialization_roundtrip(&byte_code_hash); - } - - #[test] - fn contract_wasm_hash_bincode_roundtrip() { - let rng = &mut TestRng::new(); - let byte_code_hash = ByteCodeHash(rng.gen()); - let serialized = bincode::serialize(&byte_code_hash).unwrap(); - let deserialized = 
bincode::deserialize(&serialized).unwrap(); - assert_eq!(byte_code_hash, deserialized) - } - - #[test] - fn contract_wasm_hash_json_roundtrip() { - let rng = &mut TestRng::new(); - let byte_code_hash = ByteCodeHash(rng.gen()); - let json_string = serde_json::to_string_pretty(&byte_code_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(byte_code_hash, decoded) - } -} diff --git a/casper_types_ver_2_0/src/bytesrepr.rs b/casper_types_ver_2_0/src/bytesrepr.rs deleted file mode 100644 index e66087b5..00000000 --- a/casper_types_ver_2_0/src/bytesrepr.rs +++ /dev/null @@ -1,1646 +0,0 @@ -//! Contains serialization and deserialization code for types used throughout the system. -mod bytes; - -use alloc::{ - alloc::{alloc, Layout}, - collections::{BTreeMap, BTreeSet, VecDeque}, - str, - string::String, - vec, - vec::Vec, -}; -#[cfg(debug_assertions)] -use core::any; -use core::{ - convert::TryInto, - fmt::{self, Display, Formatter}, - mem, - ptr::NonNull, -}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num_integer::Integer; -use num_rational::Ratio; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -pub use bytes::Bytes; - -/// The number of bytes in a serialized `()`. -pub const UNIT_SERIALIZED_LENGTH: usize = 0; -/// The number of bytes in a serialized `bool`. -pub const BOOL_SERIALIZED_LENGTH: usize = 1; -/// The number of bytes in a serialized `i32`. -pub const I32_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `i64`. -pub const I64_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u8`. -pub const U8_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u16`. -pub const U16_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u32`. -pub const U32_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized `u64`. -pub const U64_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized [`U128`](crate::U128). -pub const U128_SERIALIZED_LENGTH: usize = mem::size_of::(); -/// The number of bytes in a serialized [`U256`](crate::U256). -pub const U256_SERIALIZED_LENGTH: usize = U128_SERIALIZED_LENGTH * 2; -/// The number of bytes in a serialized [`U512`](crate::U512). -pub const U512_SERIALIZED_LENGTH: usize = U256_SERIALIZED_LENGTH * 2; -/// The tag representing a `None` value. -pub const OPTION_NONE_TAG: u8 = 0; -/// The tag representing a `Some` value. -pub const OPTION_SOME_TAG: u8 = 1; -/// The tag representing an `Err` value. -pub const RESULT_ERR_TAG: u8 = 0; -/// The tag representing an `Ok` value. -pub const RESULT_OK_TAG: u8 = 1; - -/// A type which can be serialized to a `Vec`. -pub trait ToBytes { - /// Serializes `&self` to a `Vec`. - fn to_bytes(&self) -> Result, Error>; - /// Consumes `self` and serializes to a `Vec`. - fn into_bytes(self) -> Result, Error> - where - Self: Sized, - { - self.to_bytes() - } - /// Returns the length of the `Vec` which would be returned from a successful call to - /// `to_bytes()` or `into_bytes()`. The data is not actually serialized, so this call is - /// relatively cheap. - fn serialized_length(&self) -> usize; - - /// Writes `&self` into a mutable `writer`. 
- fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend(self.to_bytes()?); - Ok(()) - } -} - -/// A type which can be deserialized from a `Vec`. -pub trait FromBytes: Sized { - /// Deserializes the slice into `Self`. - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error>; - - /// Deserializes the `Vec` into `Self`. - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - Self::from_bytes(bytes.as_slice()).map(|(x, remainder)| (x, Vec::from(remainder))) - } -} - -/// Returns a `Vec` initialized with sufficient capacity to hold `to_be_serialized` after -/// serialization. -pub fn unchecked_allocate_buffer(to_be_serialized: &T) -> Vec { - let serialized_length = to_be_serialized.serialized_length(); - Vec::with_capacity(serialized_length) -} - -/// Returns a `Vec` initialized with sufficient capacity to hold `to_be_serialized` after -/// serialization, or an error if the capacity would exceed `u32::max_value()`. -pub fn allocate_buffer(to_be_serialized: &T) -> Result, Error> { - let serialized_length = to_be_serialized.serialized_length(); - if serialized_length > u32::max_value() as usize { - return Err(Error::OutOfMemory); - } - Ok(Vec::with_capacity(serialized_length)) -} - -/// Serialization and deserialization errors. -#[derive(Copy, Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(rename = "BytesreprError") -)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Early end of stream while deserializing. - EarlyEndOfStream = 0, - /// Formatting error while deserializing. - Formatting, - /// Not all input bytes were consumed in [`deserialize`]. - LeftOverBytes, - /// Out of memory error. - OutOfMemory, - /// No serialized representation is available for a value. - NotRepresentable, - /// Exceeded a recursion depth limit. 
- ExceededRecursionDepth, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::EarlyEndOfStream => { - formatter.write_str("Deserialization error: early end of stream") - } - Error::Formatting => formatter.write_str("Deserialization error: formatting"), - Error::LeftOverBytes => formatter.write_str("Deserialization error: left-over bytes"), - Error::OutOfMemory => formatter.write_str("Serialization error: out of memory"), - Error::NotRepresentable => { - formatter.write_str("Serialization error: value is not representable.") - } - Error::ExceededRecursionDepth => formatter.write_str("exceeded recursion depth"), - } - } -} - -impl ToBytes for Error { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - (*self as u8).write_bytes(writer) - } - - fn to_bytes(&self) -> Result, Error> { - (*self as u8).to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for Error { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (value, remainder) = u8::from_bytes(bytes)?; - match value { - value if value == Error::EarlyEndOfStream as u8 => { - Ok((Error::EarlyEndOfStream, remainder)) - } - value if value == Error::Formatting as u8 => Ok((Error::Formatting, remainder)), - value if value == Error::LeftOverBytes as u8 => Ok((Error::LeftOverBytes, remainder)), - value if value == Error::OutOfMemory as u8 => Ok((Error::OutOfMemory, remainder)), - value if value == Error::NotRepresentable as u8 => { - Ok((Error::NotRepresentable, remainder)) - } - value if value == Error::ExceededRecursionDepth as u8 => { - Ok((Error::ExceededRecursionDepth, remainder)) - } - _ => Err(Error::Formatting), - } - } -} - -#[cfg(feature = "std")] -impl StdError for Error {} - -/// Deserializes `bytes` into an instance of `T`. -/// -/// Returns an error if the bytes cannot be deserialized into `T` or if not all of the input bytes -/// are consumed in the operation. -pub fn deserialize(bytes: Vec) -> Result { - let (t, remainder) = T::from_bytes(&bytes)?; - if remainder.is_empty() { - Ok(t) - } else { - Err(Error::LeftOverBytes) - } -} - -/// Deserializes a slice of bytes into an instance of `T`. -/// -/// Returns an error if the bytes cannot be deserialized into `T` or if not all of the input bytes -/// are consumed in the operation. -pub fn deserialize_from_slice, O: FromBytes>(bytes: I) -> Result { - let (t, remainder) = O::from_bytes(bytes.as_ref())?; - if remainder.is_empty() { - Ok(t) - } else { - Err(Error::LeftOverBytes) - } -} - -/// Serializes `t` into a `Vec`. -pub fn serialize(t: impl ToBytes) -> Result, Error> { - t.into_bytes() -} - -/// Safely splits the slice at the given point. 
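// Editor's note (illustrative, not part of the deleted file; the import path
// is an assumption): `deserialize` above insists that every input byte is
// consumed, so trailing garbage surfaces as `Error::LeftOverBytes` rather than
// being silently ignored.
//
// use casper_types_ver_2_0::bytesrepr::{self, Error};
fn _trailing_bytes_are_rejected() {
    // 1u32 encodes as [1, 0, 0, 0]; the extra 0xff byte makes deserialization fail.
    let result: Result<u32, Error> = bytesrepr::deserialize(vec![1, 0, 0, 0, 0xff]);
    assert_eq!(result, Err(Error::LeftOverBytes));
}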
-pub(crate) fn safe_split_at(bytes: &[u8], n: usize) -> Result<(&[u8], &[u8]), Error> {
-    if n > bytes.len() {
-        Err(Error::EarlyEndOfStream)
-    } else {
-        Ok(bytes.split_at(n))
-    }
-}
-
-impl ToBytes for () {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(Vec::new())
-    }
-
-    fn serialized_length(&self) -> usize {
-        UNIT_SERIALIZED_LENGTH
-    }
-}
-
-impl FromBytes for () {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        Ok(((), bytes))
-    }
-}
-
-impl ToBytes for bool {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        u8::from(*self).to_bytes()
-    }
-
-    fn serialized_length(&self) -> usize {
-        BOOL_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.push(*self as u8);
-        Ok(())
-    }
-}
-
-impl FromBytes for bool {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        match bytes.split_first() {
-            None => Err(Error::EarlyEndOfStream),
-            Some((byte, rem)) => match byte {
-                1 => Ok((true, rem)),
-                0 => Ok((false, rem)),
-                _ => Err(Error::Formatting),
-            },
-        }
-    }
-}
-
-impl ToBytes for u8 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(vec![*self])
-    }
-
-    fn serialized_length(&self) -> usize {
-        U8_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.push(*self);
-        Ok(())
-    }
-}
-
-impl FromBytes for u8 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        match bytes.split_first() {
-            None => Err(Error::EarlyEndOfStream),
-            Some((byte, rem)) => Ok((*byte, rem)),
-        }
-    }
-}
-
-impl ToBytes for i32 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(self.to_le_bytes().to_vec())
-    }
-
-    fn serialized_length(&self) -> usize {
-        I32_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.extend_from_slice(&self.to_le_bytes());
-        Ok(())
-    }
-}
-
-impl FromBytes for i32 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let mut result = [0u8; I32_SERIALIZED_LENGTH];
-        let (bytes, remainder) = safe_split_at(bytes, I32_SERIALIZED_LENGTH)?;
-        result.copy_from_slice(bytes);
-        Ok((<i32>::from_le_bytes(result), remainder))
-    }
-}
-
-impl ToBytes for i64 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(self.to_le_bytes().to_vec())
-    }
-
-    fn serialized_length(&self) -> usize {
-        I64_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.extend_from_slice(&self.to_le_bytes());
-        Ok(())
-    }
-}
-
-impl FromBytes for i64 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let mut result = [0u8; I64_SERIALIZED_LENGTH];
-        let (bytes, remainder) = safe_split_at(bytes, I64_SERIALIZED_LENGTH)?;
-        result.copy_from_slice(bytes);
-        Ok((<i64>::from_le_bytes(result), remainder))
-    }
-}
-
-impl ToBytes for u16 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(self.to_le_bytes().to_vec())
-    }
-
-    fn serialized_length(&self) -> usize {
-        U16_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.extend_from_slice(&self.to_le_bytes());
-        Ok(())
-    }
-}
-
-impl FromBytes for u16 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let mut result = [0u8; U16_SERIALIZED_LENGTH];
-        let (bytes, remainder) = safe_split_at(bytes, U16_SERIALIZED_LENGTH)?;
-        result.copy_from_slice(bytes);
-        Ok((<u16>::from_le_bytes(result), remainder))
-    }
-}
-
-impl ToBytes for u32 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(self.to_le_bytes().to_vec())
-    }
-
-    fn serialized_length(&self) -> usize {
-        U32_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.extend_from_slice(&self.to_le_bytes());
-        Ok(())
-    }
-}
-
-impl FromBytes for u32 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let mut result = [0u8; U32_SERIALIZED_LENGTH];
-        let (bytes, remainder) = safe_split_at(bytes, U32_SERIALIZED_LENGTH)?;
-        result.copy_from_slice(bytes);
-        Ok((<u32>::from_le_bytes(result), remainder))
-    }
-}
-
-impl ToBytes for u64 {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        Ok(self.to_le_bytes().to_vec())
-    }
-
-    fn serialized_length(&self) -> usize {
-        U64_SERIALIZED_LENGTH
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        writer.extend_from_slice(&self.to_le_bytes());
-        Ok(())
-    }
-}
-
-impl FromBytes for u64 {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let mut result = [0u8; U64_SERIALIZED_LENGTH];
-        let (bytes, remainder) = safe_split_at(bytes, U64_SERIALIZED_LENGTH)?;
-        result.copy_from_slice(bytes);
-        Ok((<u64>::from_le_bytes(result), remainder))
-    }
-}
-
-impl ToBytes for String {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        let bytes = self.as_bytes();
-        u8_slice_to_bytes(bytes)
-    }
-
-    fn serialized_length(&self) -> usize {
-        u8_slice_serialized_length(self.as_bytes())
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        write_u8_slice(self.as_bytes(), writer)?;
-        Ok(())
-    }
-}
-
-impl FromBytes for String {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> {
-        let (size, remainder) = u32::from_bytes(bytes)?;
-        let (str_bytes, remainder) = safe_split_at(remainder, size as usize)?;
-        let result = String::from_utf8(str_bytes.to_vec()).map_err(|_| Error::Formatting)?;
-        Ok((result, remainder))
-    }
-}
-
-fn ensure_efficient_serialization<T>() {
-    #[cfg(debug_assertions)]
-    debug_assert_ne!(
-        any::type_name::<T>(),
-        any::type_name::<u8>(),
-        "You should use `casper_types_ver_2_0::bytesrepr::Bytes` newtype wrapper instead of `Vec<u8>` for efficiency"
-    );
-}
-
-fn iterator_serialized_length<'a, T: 'a + ToBytes>(ts: impl Iterator<Item = &'a T>) -> usize {
-    U32_SERIALIZED_LENGTH + ts.map(ToBytes::serialized_length).sum::<usize>()
-}
-
-impl<T: ToBytes> ToBytes for Vec<T> {
-    fn to_bytes(&self) -> Result<Vec<u8>, Error> {
-        ensure_efficient_serialization::<T>();
-
-        let mut result = try_vec_with_capacity(self.serialized_length())?;
-        let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?;
-        result.append(&mut length_32.to_bytes()?);
-
-        for item in self.iter() {
-            result.append(&mut item.to_bytes()?);
-        }
-
-        Ok(result)
-    }
-
-    fn into_bytes(self) -> Result<Vec<u8>, Error> {
-        ensure_efficient_serialization::<T>();
-
-        let mut result = allocate_buffer(&self)?;
-        let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?;
-        result.append(&mut length_32.to_bytes()?);
-
-        for item in self {
-            result.append(&mut item.into_bytes()?);
-        }
-
-        Ok(result)
-    }
-
-    fn serialized_length(&self) -> usize {
-        iterator_serialized_length(self.iter())
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), Error> {
-        let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?;
-        writer.extend_from_slice(&length_32.to_le_bytes());
-        for item in self.iter() {
-            item.write_bytes(writer)?;
-        }
-        Ok(())
-    }
-}
-
-// TODO Replace `try_vec_with_capacity` with `Vec::try_reserve_exact` once it's in stable.
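As the `String` and `Vec<T>` impls above show, every variable-length value in this format is a little-endian `u32` length prefix followed by the serialized elements. A minimal standalone sketch of that wire format (it does not use the crate; `encode_vec_u32` is a hypothetical helper for illustration only):

fn encode_vec_u32(values: &[u32]) -> Vec<u8> {
    // Length prefix: number of elements as a little-endian u32.
    let mut out = Vec::with_capacity(4 + 4 * values.len());
    out.extend_from_slice(&(values.len() as u32).to_le_bytes());
    // Each element is then appended as its own little-endian bytes.
    for v in values {
        out.extend_from_slice(&v.to_le_bytes());
    }
    out
}

fn main() {
    // vec![1u32, 2] serializes as [2, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0].
    assert_eq!(
        encode_vec_u32(&[1, 2]),
        vec![2, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0]
    );
}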
-fn try_vec_with_capacity(capacity: usize) -> Result, Error> { - // see https://doc.rust-lang.org/src/alloc/raw_vec.rs.html#75-98 - let elem_size = mem::size_of::(); - let alloc_size = capacity.checked_mul(elem_size).ok_or(Error::OutOfMemory)?; - - let ptr = if alloc_size == 0 { - NonNull::::dangling() - } else { - let align = mem::align_of::(); - let layout = Layout::from_size_align(alloc_size, align).map_err(|_| Error::OutOfMemory)?; - let raw_ptr = unsafe { alloc(layout) }; - let non_null_ptr = NonNull::::new(raw_ptr).ok_or(Error::OutOfMemory)?; - non_null_ptr.cast() - }; - unsafe { Ok(Vec::from_raw_parts(ptr.as_ptr(), 0, capacity)) } -} - -fn vec_from_vec(bytes: Vec) -> Result<(Vec, Vec), Error> { - ensure_efficient_serialization::(); - - Vec::::from_bytes(bytes.as_slice()).map(|(x, remainder)| (x, Vec::from(remainder))) -} - -impl FromBytes for Vec { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - ensure_efficient_serialization::(); - - let (count, mut stream) = u32::from_bytes(bytes)?; - - let mut result = try_vec_with_capacity(count as usize)?; - for _ in 0..count { - let (value, remainder) = T::from_bytes(stream)?; - result.push(value); - stream = remainder; - } - - Ok((result, stream)) - } - - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - vec_from_vec(bytes) - } -} - -impl ToBytes for VecDeque { - fn to_bytes(&self) -> Result, Error> { - let (slice1, slice2) = self.as_slices(); - let mut result = allocate_buffer(self)?; - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut length_32.to_bytes()?); - for item in slice1.iter().chain(slice2.iter()) { - result.append(&mut item.to_bytes()?); - } - Ok(result) - } - - fn into_bytes(self) -> Result, Error> { - let vec: Vec = self.into(); - vec.to_bytes() - } - - fn serialized_length(&self) -> usize { - let (slice1, slice2) = self.as_slices(); - iterator_serialized_length(slice1.iter().chain(slice2.iter())) - } -} - -impl FromBytes for VecDeque { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (vec, bytes) = Vec::from_bytes(bytes)?; - Ok((VecDeque::from(vec), bytes)) - } - - fn from_vec(bytes: Vec) -> Result<(Self, Vec), Error> { - let (vec, bytes) = vec_from_vec(bytes)?; - Ok((VecDeque::from(vec), bytes)) - } -} - -impl ToBytes for [u8; COUNT] { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - Ok(self.to_vec()) - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - COUNT - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend_from_slice(self); - Ok(()) - } -} - -impl FromBytes for [u8; COUNT] { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = safe_split_at(bytes, COUNT)?; - // SAFETY: safe_split_at makes sure `bytes` is exactly `COUNT` bytes. 
- let ptr = bytes.as_ptr() as *const [u8; COUNT]; - let result = unsafe { *ptr }; - Ok((result, rem)) - } -} - -impl ToBytes for BTreeSet { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - - let num_keys: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut num_keys.to_bytes()?); - - for value in self.iter() { - result.append(&mut value.to_bytes()?); - } - - Ok(result) - } - - fn serialized_length(&self) -> usize { - U32_SERIALIZED_LENGTH + self.iter().map(|v| v.serialized_length()).sum::() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for value in self.iter() { - value.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for BTreeSet { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_keys, mut stream) = u32::from_bytes(bytes)?; - let mut result = BTreeSet::new(); - for _ in 0..num_keys { - let (v, rem) = V::from_bytes(stream)?; - result.insert(v); - stream = rem; - } - Ok((result, stream)) - } -} - -impl ToBytes for BTreeMap -where - K: ToBytes, - V: ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - - let num_keys: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - result.append(&mut num_keys.to_bytes()?); - - for (key, value) in self.iter() { - result.append(&mut key.to_bytes()?); - result.append(&mut value.to_bytes()?); - } - - Ok(result) - } - - fn serialized_length(&self) -> usize { - U32_SERIALIZED_LENGTH - + self - .iter() - .map(|(key, value)| key.serialized_length() + value.serialized_length()) - .sum::() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = self.len().try_into().map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - for (key, value) in self.iter() { - key.write_bytes(writer)?; - value.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for BTreeMap -where - K: FromBytes + Ord, - V: FromBytes, -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_keys, mut stream) = u32::from_bytes(bytes)?; - let mut result = BTreeMap::new(); - for _ in 0..num_keys { - let (k, rem) = K::from_bytes(stream)?; - let (v, rem) = V::from_bytes(rem)?; - result.insert(k, v); - stream = rem; - } - Ok((result, stream)) - } -} - -impl ToBytes for Option { - fn to_bytes(&self) -> Result, Error> { - match self { - None => Ok(vec![OPTION_NONE_TAG]), - Some(v) => { - let mut result = allocate_buffer(self)?; - result.push(OPTION_SOME_TAG); - - let mut value = v.to_bytes()?; - result.append(&mut value); - - Ok(result) - } - } - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - Some(v) => v.serialized_length(), - None => 0, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - match self { - None => writer.push(OPTION_NONE_TAG), - Some(v) => { - writer.push(OPTION_SOME_TAG); - v.write_bytes(writer)?; - } - }; - Ok(()) - } -} - -impl FromBytes for Option { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (tag, rem) = u8::from_bytes(bytes)?; - match tag { - OPTION_NONE_TAG => Ok((None, rem)), - OPTION_SOME_TAG => { - let (t, rem) = T::from_bytes(rem)?; - Ok((Some(t), rem)) - } - _ => Err(Error::Formatting), - } - } -} - -impl ToBytes for Result { - fn to_bytes(&self) -> Result, 
Error> { - let mut result = allocate_buffer(self)?; - let (variant, mut value) = match self { - Err(error) => (RESULT_ERR_TAG, error.to_bytes()?), - Ok(result) => (RESULT_OK_TAG, result.to_bytes()?), - }; - result.push(variant); - result.append(&mut value); - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - Ok(ok) => ok.serialized_length(), - Err(error) => error.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - match self { - Err(error) => { - writer.push(RESULT_ERR_TAG); - error.write_bytes(writer)?; - } - Ok(result) => { - writer.push(RESULT_OK_TAG); - result.write_bytes(writer)?; - } - }; - Ok(()) - } -} - -impl FromBytes for Result { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (variant, rem) = u8::from_bytes(bytes)?; - match variant { - RESULT_ERR_TAG => { - let (value, rem) = E::from_bytes(rem)?; - Ok((Err(value), rem)) - } - RESULT_OK_TAG => { - let (value, rem) = T::from_bytes(rem)?; - Ok((Ok(value), rem)) - } - _ => Err(Error::Formatting), - } - } -} - -impl ToBytes for (T1,) { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for (T1,) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - Ok(((t1,), remainder)) - } -} - -impl ToBytes for (T1, T2) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() - } -} - -impl FromBytes for (T1, T2) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - Ok(((t1, t2), remainder)) - } -} - -impl ToBytes for (T1, T2, T3) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() + self.2.serialized_length() - } -} - -impl FromBytes for (T1, T2, T3) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - Ok(((t1, t2, t3), remainder)) - } -} - -impl ToBytes for (T1, T2, T3, T4) { - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - } -} - -impl FromBytes for (T1, T2, T3, T4) { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4), remainder)) - } -} - -impl ToBytes - for (T1, T2, 
T3, T4, T5) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - } -} - -impl FromBytes - for (T1, T2, T3, T4, T5) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5), remainder)) - } -} - -impl ToBytes - for (T1, T2, T3, T4, T5, T6) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - } -} - -impl - FromBytes for (T1, T2, T3, T4, T5, T6) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6), remainder)) - } -} - -impl - ToBytes for (T1, T2, T3, T4, T5, T6, T7) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: 
ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: ToBytes, - T9: ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - result.append(&mut self.8.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - + self.8.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - T9: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - let (t9, remainder) = T9::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8, t9), remainder)) - } -} - -impl< - T1: ToBytes, - T2: ToBytes, - T3: ToBytes, - T4: ToBytes, - T5: ToBytes, - T6: ToBytes, - T7: ToBytes, - T8: ToBytes, - T9: ToBytes, - T10: ToBytes, - > ToBytes for (T1, T2, T3, T4, T5, T6, T7, T8, 
T9, T10) -{ - fn to_bytes(&self) -> Result, Error> { - let mut result = allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - result.append(&mut self.2.to_bytes()?); - result.append(&mut self.3.to_bytes()?); - result.append(&mut self.4.to_bytes()?); - result.append(&mut self.5.to_bytes()?); - result.append(&mut self.6.to_bytes()?); - result.append(&mut self.7.to_bytes()?); - result.append(&mut self.8.to_bytes()?); - result.append(&mut self.9.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - + self.1.serialized_length() - + self.2.serialized_length() - + self.3.serialized_length() - + self.4.serialized_length() - + self.5.serialized_length() - + self.6.serialized_length() - + self.7.serialized_length() - + self.8.serialized_length() - + self.9.serialized_length() - } -} - -impl< - T1: FromBytes, - T2: FromBytes, - T3: FromBytes, - T4: FromBytes, - T5: FromBytes, - T6: FromBytes, - T7: FromBytes, - T8: FromBytes, - T9: FromBytes, - T10: FromBytes, - > FromBytes for (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (t1, remainder) = T1::from_bytes(bytes)?; - let (t2, remainder) = T2::from_bytes(remainder)?; - let (t3, remainder) = T3::from_bytes(remainder)?; - let (t4, remainder) = T4::from_bytes(remainder)?; - let (t5, remainder) = T5::from_bytes(remainder)?; - let (t6, remainder) = T6::from_bytes(remainder)?; - let (t7, remainder) = T7::from_bytes(remainder)?; - let (t8, remainder) = T8::from_bytes(remainder)?; - let (t9, remainder) = T9::from_bytes(remainder)?; - let (t10, remainder) = T10::from_bytes(remainder)?; - Ok(((t1, t2, t3, t4, t5, t6, t7, t8, t9, t10), remainder)) - } -} - -impl ToBytes for str { - #[inline] - fn to_bytes(&self) -> Result, Error> { - u8_slice_to_bytes(self.as_bytes()) - } - - #[inline] - fn serialized_length(&self) -> usize { - u8_slice_serialized_length(self.as_bytes()) - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - write_u8_slice(self.as_bytes(), writer)?; - Ok(()) - } -} - -impl ToBytes for &str { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - (*self).to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - (*self).serialized_length() - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - write_u8_slice(self.as_bytes(), writer)?; - Ok(()) - } -} - -impl ToBytes for &T -where - T: ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - (*self).to_bytes() - } - - fn serialized_length(&self) -> usize { - (*self).serialized_length() - } -} - -impl ToBytes for Ratio -where - T: Clone + Integer + ToBytes, -{ - fn to_bytes(&self) -> Result, Error> { - if self.denom().is_zero() { - return Err(Error::Formatting); - } - (self.numer().clone(), self.denom().clone()).into_bytes() - } - - fn serialized_length(&self) -> usize { - (self.numer().clone(), self.denom().clone()).serialized_length() - } -} - -impl FromBytes for Ratio -where - T: Clone + FromBytes + Integer, -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let ((numer, denom), rem): ((T, T), &[u8]) = FromBytes::from_bytes(bytes)?; - if denom.is_zero() { - return Err(Error::Formatting); - } - Ok((Ratio::new(numer, denom), rem)) - } -} - -/// Serializes a slice of bytes with a length prefix. -/// -/// This function is serializing a slice of bytes with an addition of a 4 byte length prefix. 
-/// -/// For safety you should prefer to use [`vec_u8_to_bytes`]. For efficiency reasons you should also -/// avoid using serializing Vec. -fn u8_slice_to_bytes(bytes: &[u8]) -> Result, Error> { - let serialized_length = u8_slice_serialized_length(bytes); - let mut vec = try_vec_with_capacity(serialized_length)?; - let length_prefix: u32 = bytes - .len() - .try_into() - .map_err(|_| Error::NotRepresentable)?; - let length_prefix_bytes = length_prefix.to_le_bytes(); - vec.extend_from_slice(&length_prefix_bytes); - vec.extend_from_slice(bytes); - Ok(vec) -} - -fn write_u8_slice(bytes: &[u8], writer: &mut Vec) -> Result<(), Error> { - let length_32: u32 = bytes - .len() - .try_into() - .map_err(|_| Error::NotRepresentable)?; - writer.extend_from_slice(&length_32.to_le_bytes()); - writer.extend_from_slice(bytes); - Ok(()) -} - -/// Serializes a vector of bytes with a length prefix. -/// -/// For efficiency you should avoid serializing Vec. -#[allow(clippy::ptr_arg)] -#[inline] -pub(crate) fn vec_u8_to_bytes(vec: &Vec) -> Result, Error> { - u8_slice_to_bytes(vec.as_slice()) -} - -/// Returns serialized length of serialized slice of bytes. -/// -/// This function adds a length prefix in the beginning. -#[inline(always)] -fn u8_slice_serialized_length(bytes: &[u8]) -> usize { - U32_SERIALIZED_LENGTH + bytes.len() -} - -#[allow(clippy::ptr_arg)] -#[inline] -pub(crate) fn vec_u8_serialized_length(vec: &Vec) -> usize { - u8_slice_serialized_length(vec.as_slice()) -} - -/// Asserts that `t` can be serialized and when deserialized back into an instance `T` compares -/// equal to `t`. -/// -/// Also asserts that `t.serialized_length()` is the same as the actual number of bytes of the -/// serialized `t` instance. -#[cfg(any(feature = "testing", test))] -#[track_caller] -pub fn test_serialization_roundtrip(t: &T) -where - T: fmt::Debug + ToBytes + FromBytes + PartialEq, -{ - let serialized = ToBytes::to_bytes(t).expect("Unable to serialize data"); - assert_eq!( - serialized.len(), - t.serialized_length(), - "\nLength of serialized data: {},\nserialized_length() yielded: {},\n t is {:?}", - serialized.len(), - t.serialized_length(), - t - ); - let mut written_bytes = vec![]; - t.write_bytes(&mut written_bytes) - .expect("Unable to serialize data via write_bytes"); - assert_eq!(serialized, written_bytes); - - let deserialized_from_slice = - deserialize_from_slice(&serialized).expect("Unable to deserialize data"); - assert_eq!(*t, deserialized_from_slice); - - let deserialized = deserialize::(serialized).expect("Unable to deserialize data"); - assert_eq!(*t, deserialized); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn should_not_serialize_zero_denominator() { - let malicious = Ratio::new_raw(1, 0); - assert_eq!(malicious.to_bytes().unwrap_err(), Error::Formatting); - } - - #[test] - fn should_not_deserialize_zero_denominator() { - let malicious_bytes = (1u64, 0u64).to_bytes().unwrap(); - let result: Result, Error> = deserialize(malicious_bytes); - assert_eq!(result.unwrap_err(), Error::Formatting); - } - - #[test] - fn should_have_generic_tobytes_impl_for_borrowed_types() { - struct NonCopyable; - - impl ToBytes for NonCopyable { - fn to_bytes(&self) -> Result, Error> { - Ok(vec![1, 2, 3]) - } - - fn serialized_length(&self) -> usize { - 3 - } - } - - let noncopyable: &NonCopyable = &NonCopyable; - - assert_eq!(noncopyable.to_bytes().unwrap(), vec![1, 2, 3]); - assert_eq!(noncopyable.serialized_length(), 3); - assert_eq!(noncopyable.into_bytes().unwrap(), vec![1, 2, 3]); - } - - 
#[cfg(debug_assertions)] - #[test] - #[should_panic( - expected = "You should use `casper_types_ver_2_0::bytesrepr::Bytes` newtype wrapper instead of `Vec` for efficiency" - )] - fn should_fail_to_serialize_slice_of_u8() { - let bytes = b"0123456789".to_vec(); - bytes.to_bytes().unwrap(); - } -} - -#[cfg(test)] -mod proptests { - use std::collections::VecDeque; - - use proptest::{collection::vec, prelude::*}; - - use crate::{ - bytesrepr::{self, bytes::gens::bytes_arb, ToBytes}, - gens::*, - }; - - proptest! { - #[test] - fn test_bool(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u8(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u16(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u32(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_i32(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u64(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_i64(u in any::()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u8_slice_32(s in u8_slice_32()) { - bytesrepr::test_serialization_roundtrip(&s); - } - - #[test] - fn test_vec_u8(u in bytes_arb(1..100)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_vec_i32(u in vec(any::(), 1..100)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_vecdeque_i32((front, back) in (vec(any::(), 1..100), vec(any::(), 1..100))) { - let mut vec_deque = VecDeque::new(); - for f in front { - vec_deque.push_front(f); - } - for f in back { - vec_deque.push_back(f); - } - bytesrepr::test_serialization_roundtrip(&vec_deque); - } - - #[test] - fn test_vec_vec_u8(u in vec(bytes_arb(1..100), 10)) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_uref_map(m in named_keys_arb(20)) { - bytesrepr::test_serialization_roundtrip(&m); - } - - #[test] - fn test_array_u8_32(arr in any::<[u8; 32]>()) { - bytesrepr::test_serialization_roundtrip(&arr); - } - - #[test] - fn test_string(s in "\\PC*") { - bytesrepr::test_serialization_roundtrip(&s); - } - - #[test] - fn test_str(s in "\\PC*") { - let not_a_string_object = s.as_str(); - not_a_string_object.to_bytes().expect("should serialize a str"); - } - - #[test] - fn test_option(o in proptest::option::of(key_arb())) { - bytesrepr::test_serialization_roundtrip(&o); - } - - #[test] - fn test_unit(unit in Just(())) { - bytesrepr::test_serialization_roundtrip(&unit); - } - - #[test] - fn test_u128_serialization(u in u128_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u256_serialization(u in u256_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_u512_serialization(u in u512_arb()) { - bytesrepr::test_serialization_roundtrip(&u); - } - - #[test] - fn test_key_serialization(key in key_arb()) { - bytesrepr::test_serialization_roundtrip(&key); - } - - #[test] - fn test_cl_value_serialization(cl_value in cl_value_arb()) { - bytesrepr::test_serialization_roundtrip(&cl_value); - } - - #[test] - fn test_access_rights(access_right in access_rights_arb()) { - bytesrepr::test_serialization_roundtrip(&access_right); - } - - #[test] - fn test_uref(uref in uref_arb()) { - bytesrepr::test_serialization_roundtrip(&uref); - } - - #[test] - fn test_account_hash(pk in account_hash_arb()) { - bytesrepr::test_serialization_roundtrip(&pk); - } - - #[test] - fn 
test_result(result in result_arb()) { - bytesrepr::test_serialization_roundtrip(&result); - } - - #[test] - fn test_phase_serialization(phase in phase_arb()) { - bytesrepr::test_serialization_roundtrip(&phase); - } - - #[test] - fn test_protocol_version(protocol_version in protocol_version_arb()) { - bytesrepr::test_serialization_roundtrip(&protocol_version); - } - - #[test] - fn test_sem_ver(sem_ver in sem_ver_arb()) { - bytesrepr::test_serialization_roundtrip(&sem_ver); - } - - #[test] - fn test_tuple1(t in (any::(),)) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple2(t in (any::(),any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple3(t in (any::(),any::(),any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - - #[test] - fn test_tuple4(t in (any::(),any::(),any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple5(t in (any::(),any::(),any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple6(t in (any::(),any::(),any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple7(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple8(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple9(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_tuple10(t in (any::(),any::(),any::(), any::(), any::(), any::(), any::(), any::(), any::(), any::())) { - bytesrepr::test_serialization_roundtrip(&t); - } - #[test] - fn test_ratio_u64(t in (any::(), 1..u64::max_value())) { - bytesrepr::test_serialization_roundtrip(&t); - } - } -} diff --git a/casper_types_ver_2_0/src/bytesrepr/bytes.rs b/casper_types_ver_2_0/src/bytesrepr/bytes.rs deleted file mode 100644 index cf7196ce..00000000 --- a/casper_types_ver_2_0/src/bytesrepr/bytes.rs +++ /dev/null @@ -1,405 +0,0 @@ -use alloc::{ - string::String, - vec::{IntoIter, Vec}, -}; -use core::{ - cmp, fmt, - iter::FromIterator, - ops::{Deref, Index, Range, RangeFrom, RangeFull, RangeTo}, - slice, -}; - -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{ - de::{Error as SerdeError, SeqAccess, Visitor}, - Deserialize, Deserializer, Serialize, Serializer, -}; - -use super::{Error, FromBytes, ToBytes}; -use crate::{checksummed_hex, CLType, CLTyped}; - -/// A newtype wrapper for bytes that has efficient serialization routines. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Default, Hash)] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Hex-encoded bytes.") -)] -#[rustfmt::skip] -pub struct Bytes( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] - Vec -); - -impl Bytes { - /// Constructs a new, empty vector of bytes. - pub fn new() -> Bytes { - Bytes::default() - } - - /// Returns reference to inner container. - #[inline] - pub fn inner_bytes(&self) -> &Vec { - &self.0 - } - - /// Extracts a slice containing the entire vector. - pub fn as_slice(&self) -> &[u8] { - self - } - - /// Consumes self and returns the inner bytes. 
- pub fn take_inner(self) -> Vec { - self.0 - } -} - -impl Deref for Bytes { - type Target = [u8]; - - fn deref(&self) -> &Self::Target { - self.0.deref() - } -} - -impl From> for Bytes { - fn from(vec: Vec) -> Self { - Self(vec) - } -} - -impl From for Vec { - fn from(bytes: Bytes) -> Self { - bytes.0 - } -} - -impl From<&[u8]> for Bytes { - fn from(bytes: &[u8]) -> Self { - Self(bytes.to_vec()) - } -} - -impl CLTyped for Bytes { - fn cl_type() -> CLType { - >::cl_type() - } -} - -impl AsRef<[u8]> for Bytes { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for Bytes { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - super::vec_u8_to_bytes(&self.0) - } - - #[inline(always)] - fn into_bytes(self) -> Result, Error> { - super::vec_u8_to_bytes(&self.0) - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - super::vec_u8_serialized_length(&self.0) - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - super::write_u8_slice(self.as_slice(), writer) - } -} - -impl FromBytes for Bytes { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), super::Error> { - let (size, remainder) = u32::from_bytes(bytes)?; - let (result, remainder) = super::safe_split_at(remainder, size as usize)?; - Ok((Bytes(result.to_vec()), remainder)) - } - - fn from_vec(stream: Vec) -> Result<(Self, Vec), Error> { - let (size, mut stream) = u32::from_vec(stream)?; - - if size as usize > stream.len() { - Err(Error::EarlyEndOfStream) - } else { - let remainder = stream.split_off(size as usize); - Ok((Bytes(stream), remainder)) - } - } -} - -impl Index for Bytes { - type Output = u8; - - fn index(&self, index: usize) -> &u8 { - let Bytes(ref dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: Range) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: RangeTo) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index> for Bytes { - type Output = [u8]; - - fn index(&self, index: RangeFrom) -> &[u8] { - let Bytes(dat) = self; - &dat[index] - } -} - -impl Index for Bytes { - type Output = [u8]; - - fn index(&self, _: RangeFull) -> &[u8] { - let Bytes(dat) = self; - &dat[..] 
- } -} - -impl FromIterator for Bytes { - #[inline] - fn from_iter>(iter: I) -> Bytes { - let vec = Vec::from_iter(iter); - Bytes(vec) - } -} - -impl<'a> IntoIterator for &'a Bytes { - type Item = &'a u8; - - type IntoIter = slice::Iter<'a, u8>; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter() - } -} - -impl IntoIterator for Bytes { - type Item = u8; - - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -#[cfg(feature = "datasize")] -impl datasize::DataSize for Bytes { - const IS_DYNAMIC: bool = true; - - const STATIC_HEAP_SIZE: usize = 0; - - fn estimate_heap_size(&self) -> usize { - self.0.capacity() * std::mem::size_of::() - } -} - -const RANDOM_BYTES_MAX_LENGTH: usize = 100; - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Bytes { - let len = rng.gen_range(0..RANDOM_BYTES_MAX_LENGTH); - let mut result = Vec::with_capacity(len); - for _ in 0..len { - result.push(rng.gen()); - } - result.into() - } -} - -struct BytesVisitor; - -impl<'de> Visitor<'de> for BytesVisitor { - type Value = Bytes; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("byte array") - } - - fn visit_seq(self, mut visitor: V) -> Result - where - V: SeqAccess<'de>, - { - let len = cmp::min(visitor.size_hint().unwrap_or(0), 4096); - let mut bytes = Vec::with_capacity(len); - - while let Some(b) = visitor.next_element()? { - bytes.push(b); - } - - Ok(Bytes::from(bytes)) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v)) - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v)) - } - - fn visit_str(self, v: &str) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v.as_bytes())) - } - - fn visit_string(self, v: String) -> Result - where - E: SerdeError, - { - Ok(Bytes::from(v.into_bytes())) - } -} - -impl<'de> Deserialize<'de> for Bytes { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - checksummed_hex::decode(hex_string) - .map(Bytes) - .map_err(SerdeError::custom) - } else { - let bytes = deserializer.deserialize_byte_buf(BytesVisitor)?; - Ok(bytes) - } - } -} - -impl Serialize for Bytes { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - base16::encode_lower(&self.0).serialize(serializer) - } else { - serializer.serialize_bytes(&self.0) - } - } -} - -#[cfg(test)] -mod tests { - use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}; - use alloc::vec::Vec; - - use serde_json::json; - use serde_test::{assert_tokens, Configure, Token}; - - use super::Bytes; - - const TRUTH: &[u8] = &[0xde, 0xad, 0xbe, 0xef]; - - #[test] - fn vec_u8_from_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let data_bytes = data.to_bytes().unwrap(); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH / 2]).is_err()); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH]).is_err()); - assert!(Bytes::from_bytes(&data_bytes[..U32_SERIALIZED_LENGTH + 2]).is_err()); - } - - #[test] - fn should_serialize_deserialize_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - bytesrepr::test_serialization_roundtrip(&data); - } - - #[test] - fn should_fail_to_serialize_deserialize_malicious_bytes() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let mut serialized = 
data.to_bytes().expect("should serialize data"); - serialized = serialized[..serialized.len() - 1].to_vec(); - let res: Result<(_, &[u8]), Error> = Bytes::from_bytes(&serialized); - assert_eq!(res.unwrap_err(), Error::EarlyEndOfStream); - } - - #[test] - fn should_serialize_deserialize_bytes_and_keep_rem() { - let data: Bytes = vec![1, 2, 3, 4, 5].into(); - let expected_rem: Vec = vec![6, 7, 8, 9, 10]; - let mut serialized = data.to_bytes().expect("should serialize data"); - serialized.extend(&expected_rem); - let (deserialized, rem): (Bytes, &[u8]) = - FromBytes::from_bytes(&serialized).expect("should deserialize data"); - assert_eq!(data, deserialized); - assert_eq!(&rem, &expected_rem); - } - - #[test] - fn should_ser_de_human_readable() { - let truth = vec![0xde, 0xad, 0xbe, 0xef]; - - let bytes_ser: Bytes = truth.clone().into(); - - let json_object = serde_json::to_value(bytes_ser).unwrap(); - assert_eq!(json_object, json!("deadbeef")); - - let bytes_de: Bytes = serde_json::from_value(json_object).unwrap(); - assert_eq!(bytes_de, Bytes::from(truth)); - } - - #[test] - fn should_ser_de_readable() { - let truth: Bytes = TRUTH.into(); - assert_tokens(&truth.readable(), &[Token::Str("deadbeef")]); - } - - #[test] - fn should_ser_de_compact() { - let truth: Bytes = TRUTH.into(); - assert_tokens(&truth.compact(), &[Token::Bytes(TRUTH)]); - } -} - -#[cfg(test)] -pub mod gens { - use super::Bytes; - use proptest::{ - collection::{vec, SizeRange}, - prelude::*, - }; - - pub fn bytes_arb(size: impl Into) -> impl Strategy { - vec(any::(), size).prop_map(Bytes::from) - } -} diff --git a/casper_types_ver_2_0/src/chainspec.rs b/casper_types_ver_2_0/src/chainspec.rs deleted file mode 100644 index cc0f0265..00000000 --- a/casper_types_ver_2_0/src/chainspec.rs +++ /dev/null @@ -1,260 +0,0 @@ -//! The chainspec is a set of configuration options for the network. All validators must apply the -//! same set of options in order to join and act as a peer in a given network. 
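Because all validators must run with an identical set of options, nodes can compare a digest of the chainspec instead of the whole configuration; the `Chainspec::hash()` method further down in this diff serializes the struct via `to_bytes` and hashes the result. A hedged sketch of that idea (the import path and the `on_same_network` helper are assumptions for illustration, and it presumes `Digest` supports equality comparison):

use casper_types_ver_2_0::{Chainspec, Digest};

/// Returns true when a peer reports the same chainspec hash as ours, i.e. both
/// nodes were started from the same set of configuration options.
fn on_same_network(ours: &Chainspec, peer_reported_hash: Digest) -> bool {
    ours.hash() == peer_reported_hash
}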
- -mod accounts_config; -mod activation_point; -mod chainspec_raw_bytes; -mod core_config; -mod fee_handling; -mod global_state_update; -mod highway_config; -mod network_config; -mod next_upgrade; -mod protocol_config; -mod refund_handling; -mod transaction_config; -mod vm_config; - -use std::{fmt::Debug, sync::Arc}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::Serialize; -use tracing::error; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, EraId, ProtocolVersion, -}; -pub use accounts_config::{ - AccountConfig, AccountsConfig, AdministratorAccount, DelegatorConfig, GenesisAccount, - GenesisValidator, ValidatorConfig, -}; -pub use activation_point::ActivationPoint; -pub use chainspec_raw_bytes::ChainspecRawBytes; -pub use core_config::{ConsensusProtocolName, CoreConfig, LegacyRequiredFinality}; -pub use fee_handling::FeeHandling; -pub use global_state_update::{GlobalStateUpdate, GlobalStateUpdateConfig, GlobalStateUpdateError}; -pub use highway_config::HighwayConfig; -pub use network_config::NetworkConfig; -pub use next_upgrade::NextUpgrade; -pub use protocol_config::ProtocolConfig; -pub use refund_handling::RefundHandling; -pub use transaction_config::{DeployConfig, TransactionConfig, TransactionV1Config}; -#[cfg(any(feature = "testing", test))] -pub use transaction_config::{DEFAULT_MAX_PAYMENT_MOTES, DEFAULT_MIN_TRANSFER_MOTES}; -pub use vm_config::{ - AuctionCosts, BrTableCost, ChainspecRegistry, ControlFlowCosts, HandlePaymentCosts, - HostFunction, HostFunctionCost, HostFunctionCosts, MessageLimits, MintCosts, OpcodeCosts, - StandardPaymentCosts, StorageCosts, SystemConfig, UpgradeConfig, WasmConfig, - DEFAULT_HOST_FUNCTION_NEW_DICTIONARY, -}; -#[cfg(any(feature = "testing", test))] -pub use vm_config::{ - DEFAULT_ADD_BID_COST, DEFAULT_ADD_COST, DEFAULT_BIT_COST, DEFAULT_CONST_COST, - DEFAULT_CONTROL_FLOW_BLOCK_OPCODE, DEFAULT_CONTROL_FLOW_BR_IF_OPCODE, - DEFAULT_CONTROL_FLOW_BR_OPCODE, DEFAULT_CONTROL_FLOW_BR_TABLE_MULTIPLIER, - DEFAULT_CONTROL_FLOW_BR_TABLE_OPCODE, DEFAULT_CONTROL_FLOW_CALL_INDIRECT_OPCODE, - DEFAULT_CONTROL_FLOW_CALL_OPCODE, DEFAULT_CONTROL_FLOW_DROP_OPCODE, - DEFAULT_CONTROL_FLOW_ELSE_OPCODE, DEFAULT_CONTROL_FLOW_END_OPCODE, - DEFAULT_CONTROL_FLOW_IF_OPCODE, DEFAULT_CONTROL_FLOW_LOOP_OPCODE, - DEFAULT_CONTROL_FLOW_RETURN_OPCODE, DEFAULT_CONTROL_FLOW_SELECT_OPCODE, - DEFAULT_CONVERSION_COST, DEFAULT_CURRENT_MEMORY_COST, DEFAULT_DELEGATE_COST, DEFAULT_DIV_COST, - DEFAULT_GLOBAL_COST, DEFAULT_GROW_MEMORY_COST, DEFAULT_INTEGER_COMPARISON_COST, - DEFAULT_LOAD_COST, DEFAULT_LOCAL_COST, DEFAULT_MAX_STACK_HEIGHT, DEFAULT_MUL_COST, - DEFAULT_NEW_DICTIONARY_COST, DEFAULT_NOP_COST, DEFAULT_STORE_COST, DEFAULT_TRANSFER_COST, - DEFAULT_UNREACHABLE_COST, DEFAULT_WASMLESS_TRANSFER_COST, DEFAULT_WASM_MAX_MEMORY, -}; - -/// A collection of configuration settings describing the state of the system at genesis and after -/// upgrades to basic system functionality occurring after genesis. -#[derive(PartialEq, Eq, Serialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct Chainspec { - /// Protocol config. - #[serde(rename = "protocol")] - pub protocol_config: ProtocolConfig, - - /// Network config. - #[serde(rename = "network")] - pub network_config: NetworkConfig, - - /// Core config. 
- #[serde(rename = "core")] - pub core_config: CoreConfig, - - /// Highway config. - #[serde(rename = "highway")] - pub highway_config: HighwayConfig, - - /// Transaction Config. - #[serde(rename = "transactions")] - pub transaction_config: TransactionConfig, - - /// Wasm config. - #[serde(rename = "wasm")] - pub wasm_config: WasmConfig, - - /// System costs config. - #[serde(rename = "system_costs")] - pub system_costs_config: SystemConfig, -} - -impl Chainspec { - /// Serializes `self` and hashes the resulting bytes. - pub fn hash(&self) -> Digest { - let serialized_chainspec = self.to_bytes().unwrap_or_else(|error| { - error!(%error, "failed to serialize chainspec"); - vec![] - }); - Digest::hash(serialized_chainspec) - } - - /// Serializes `self` and hashes the resulting bytes, if able. - pub fn try_hash(&self) -> Result { - let arr = self - .to_bytes() - .map_err(|_| "failed to serialize chainspec".to_string())?; - Ok(Digest::hash(arr)) - } - - /// Returns the protocol version of the chainspec. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_config.version - } - - /// Returns the era ID of where we should reset back to. This means stored blocks in that and - /// subsequent eras are deleted from storage. - pub fn hard_reset_to_start_of_era(&self) -> Option { - self.protocol_config - .hard_reset - .then(|| self.protocol_config.activation_point.era_id()) - } - - /// Creates an upgrade config instance from parts. - pub fn upgrade_config_from_parts( - &self, - pre_state_hash: Digest, - current_protocol_version: ProtocolVersion, - era_id: EraId, - chainspec_raw_bytes: Arc, - ) -> Result { - let chainspec_registry = ChainspecRegistry::new_with_optional_global_state( - chainspec_raw_bytes.chainspec_bytes(), - chainspec_raw_bytes.maybe_global_state_bytes(), - ); - let global_state_update = match self.protocol_config.get_update_mapping() { - Ok(global_state_update) => global_state_update, - Err(err) => { - return Err(format!("failed to generate global state update: {}", err)); - } - }; - - Ok(UpgradeConfig::new( - pre_state_hash, - current_protocol_version, - self.protocol_config.version, - Some(era_id), - Some(self.core_config.validator_slots), - Some(self.core_config.auction_delay), - Some(self.core_config.locked_funds_period.millis()), - Some(self.core_config.round_seigniorage_rate), - Some(self.core_config.unbonding_delay), - global_state_update, - chainspec_registry, - )) - } -} - -#[cfg(any(feature = "testing", test))] -impl Chainspec { - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - let protocol_config = ProtocolConfig::random(rng); - let network_config = NetworkConfig::random(rng); - let core_config = CoreConfig::random(rng); - let highway_config = HighwayConfig::random(rng); - let transaction_config = TransactionConfig::random(rng); - let wasm_config = rng.gen(); - let system_costs_config = rng.gen(); - - Chainspec { - protocol_config, - network_config, - core_config, - highway_config, - transaction_config, - wasm_config, - system_costs_config, - } - } -} - -impl ToBytes for Chainspec { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.protocol_config.write_bytes(writer)?; - self.network_config.write_bytes(writer)?; - self.core_config.write_bytes(writer)?; - self.highway_config.write_bytes(writer)?; - self.transaction_config.write_bytes(writer)?; - self.wasm_config.write_bytes(writer)?; - self.system_costs_config.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.protocol_config.serialized_length() - + self.network_config.serialized_length() - + self.core_config.serialized_length() - + self.highway_config.serialized_length() - + self.transaction_config.serialized_length() - + self.wasm_config.serialized_length() - + self.system_costs_config.serialized_length() - } -} - -impl FromBytes for Chainspec { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (protocol_config, remainder) = ProtocolConfig::from_bytes(bytes)?; - let (network_config, remainder) = NetworkConfig::from_bytes(remainder)?; - let (core_config, remainder) = CoreConfig::from_bytes(remainder)?; - let (highway_config, remainder) = HighwayConfig::from_bytes(remainder)?; - let (transaction_config, remainder) = TransactionConfig::from_bytes(remainder)?; - let (wasm_config, remainder) = WasmConfig::from_bytes(remainder)?; - let (system_costs_config, remainder) = SystemConfig::from_bytes(remainder)?; - let chainspec = Chainspec { - protocol_config, - network_config, - core_config, - highway_config, - transaction_config, - wasm_config, - system_costs_config, - }; - Ok((chainspec, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use rand::SeedableRng; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let chainspec = Chainspec::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&chainspec); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/accounts_config.rs b/casper_types_ver_2_0/src/chainspec/accounts_config.rs deleted file mode 100644 index cffc9e80..00000000 --- a/casper_types_ver_2_0/src/chainspec/accounts_config.rs +++ /dev/null @@ -1,192 +0,0 @@ -//! The accounts config is a set of configuration options that is used to create accounts at -//! genesis, and set up auction contract with validators and delegators. 
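The `AccountsConfig` defined below is just the three lists this comment names: genesis accounts, delegators, and administrators, normally parsed from accounts.toml. A hedged usage sketch of its constructor (the import path is an assumption for illustration; a real config would of course not be empty):

use casper_types_ver_2_0::AccountsConfig;

/// Builds an accounts config with no genesis accounts, delegators, or
/// administrators; real values would come from accounts.toml.
fn empty_accounts_config() -> AccountsConfig {
    AccountsConfig::new(Vec::new(), Vec::new(), Vec::new())
}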
-mod account_config; -mod delegator_config; -mod genesis; -mod validator_config; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Deserializer, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - PublicKey, -}; - -pub use account_config::AccountConfig; -pub use delegator_config::DelegatorConfig; -pub use genesis::{AdministratorAccount, GenesisAccount, GenesisValidator}; -pub use validator_config::ValidatorConfig; - -fn sorted_vec_deserializer<'de, T, D>(deserializer: D) -> Result, D::Error> -where - T: Deserialize<'de> + Ord, - D: Deserializer<'de>, -{ - let mut vec = Vec::::deserialize(deserializer)?; - vec.sort_unstable(); - Ok(vec) -} - -/// Configuration values associated with accounts.toml -#[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct AccountsConfig { - #[serde(deserialize_with = "sorted_vec_deserializer")] - accounts: Vec, - #[serde(default, deserialize_with = "sorted_vec_deserializer")] - delegators: Vec, - #[serde( - default, - deserialize_with = "sorted_vec_deserializer", - skip_serializing_if = "Vec::is_empty" - )] - administrators: Vec, -} - -impl AccountsConfig { - /// Create new accounts config instance. - pub fn new( - accounts: Vec, - delegators: Vec, - administrators: Vec, - ) -> Self { - Self { - accounts, - delegators, - administrators, - } - } - - /// Accounts. - pub fn accounts(&self) -> &[AccountConfig] { - &self.accounts - } - - /// Delegators. - pub fn delegators(&self) -> &[DelegatorConfig] { - &self.delegators - } - - /// Administrators. - pub fn administrators(&self) -> &[AdministratorAccount] { - &self.administrators - } - - /// Account. - pub fn account(&self, public_key: &PublicKey) -> Option<&AccountConfig> { - self.accounts - .iter() - .find(|account| &account.public_key == public_key) - } - - /// All of the validators. - pub fn validators(&self) -> impl Iterator { - self.accounts - .iter() - .filter(|account| account.validator.is_some()) - } - - /// Is the provided public key in the set of genesis validator public keys. - pub fn is_genesis_validator(&self, public_key: &PublicKey) -> bool { - match self.account(public_key) { - None => false, - Some(account_config) => account_config.is_genesis_validator(), - } - } - - #[cfg(any(feature = "testing", test))] - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - use rand::Rng; - - use crate::{Motes, U512}; - - let alpha = AccountConfig::random(rng); - let accounts = vec![ - alpha.clone(), - AccountConfig::random(rng), - AccountConfig::random(rng), - AccountConfig::random(rng), - ]; - - let mut delegator = DelegatorConfig::random(rng); - delegator.validator_public_key = alpha.public_key; - - let delegators = vec![delegator]; - - let admin_balance: u32 = rng.gen(); - let administrators = vec![AdministratorAccount::new( - PublicKey::random(rng), - Motes::new(U512::from(admin_balance)), - )]; - - AccountsConfig { - accounts, - delegators, - administrators, - } - } -} - -impl ToBytes for AccountsConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.accounts.to_bytes()?); - buffer.extend(self.delegators.to_bytes()?); - buffer.extend(self.administrators.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.accounts.serialized_length() - + self.delegators.serialized_length() - + self.administrators.serialized_length() - } -} - -impl FromBytes for AccountsConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (accounts, remainder) = FromBytes::from_bytes(bytes)?; - let (delegators, remainder) = FromBytes::from_bytes(remainder)?; - let (administrators, remainder) = FromBytes::from_bytes(remainder)?; - let accounts_config = AccountsConfig::new(accounts, delegators, administrators); - Ok((accounts_config, remainder)) - } -} - -impl From for Vec { - fn from(accounts_config: AccountsConfig) -> Self { - let mut genesis_accounts = Vec::with_capacity(accounts_config.accounts.len()); - for account_config in accounts_config.accounts { - let genesis_account = account_config.into(); - genesis_accounts.push(genesis_account); - } - for delegator_config in accounts_config.delegators { - let genesis_account = delegator_config.into(); - genesis_accounts.push(genesis_account); - } - - for administrator_config in accounts_config.administrators { - let administrator_account = administrator_config.into(); - genesis_accounts.push(administrator_account); - } - - genesis_accounts - } -} - -#[cfg(any(feature = "testing", test))] -mod tests { - #[cfg(test)] - use crate::{bytesrepr, testing::TestRng, AccountsConfig}; - - #[test] - fn serialization_roundtrip() { - let mut rng = TestRng::new(); - let accounts_config = AccountsConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&accounts_config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/accounts_config/account_config.rs b/casper_types_ver_2_0/src/chainspec/accounts_config/account_config.rs deleted file mode 100644 index 7c998d35..00000000 --- a/casper_types_ver_2_0/src/chainspec/accounts_config/account_config.rs +++ /dev/null @@ -1,138 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; - -#[cfg(any(feature = "testing", test))] -use rand::{distributions::Standard, prelude::*}; - -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - GenesisAccount, Motes, PublicKey, -}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -#[cfg(any(feature = "testing", test))] -use crate::{SecretKey, U512}; - -use super::ValidatorConfig; - -/// Configuration of an individial account in accounts.toml -#[derive(PartialEq, Ord, PartialOrd, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct 
-    /// Public Key.
-    pub public_key: PublicKey,
-    /// Balance.
-    pub balance: Motes,
-    /// Validator config.
-    pub validator: Option<ValidatorConfig>,
-}
-
-impl AccountConfig {
-    /// Creates a new `AccountConfig`.
-    pub fn new(public_key: PublicKey, balance: Motes, validator: Option<ValidatorConfig>) -> Self {
-        Self {
-            public_key,
-            balance,
-            validator,
-        }
-    }
-
-    /// Public key.
-    pub fn public_key(&self) -> PublicKey {
-        self.public_key.clone()
-    }
-
-    /// Balance.
-    pub fn balance(&self) -> Motes {
-        self.balance
-    }
-
-    /// Bonded amount.
-    pub fn bonded_amount(&self) -> Motes {
-        match self.validator {
-            Some(validator_config) => validator_config.bonded_amount(),
-            None => Motes::zero(),
-        }
-    }
-
-    /// Is this a genesis validator?
-    pub fn is_genesis_validator(&self) -> bool {
-        self.validator.is_some()
-    }
-
-    #[cfg(any(feature = "testing", test))]
-    /// Generates a random instance using a `TestRng`.
-    pub fn random(rng: &mut TestRng) -> Self {
-        let public_key =
-            PublicKey::from(&SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap());
-        let balance = Motes::new(rng.gen());
-        let validator = rng.gen();
-
-        AccountConfig {
-            public_key,
-            balance,
-            validator,
-        }
-    }
-}
-
-#[cfg(any(feature = "testing", test))]
-impl Distribution<AccountConfig> for Standard {
-    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> AccountConfig {
-        let secret_key = SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap();
-        let public_key = PublicKey::from(&secret_key);
-
-        let mut u512_array = [0u8; 64];
-        rng.fill_bytes(u512_array.as_mut());
-        let balance = Motes::new(U512::from(u512_array));
-
-        let validator = rng.gen();
-
-        AccountConfig::new(public_key, balance, validator)
-    }
-}
-
-impl ToBytes for AccountConfig {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut buffer = bytesrepr::allocate_buffer(self)?;
-        buffer.extend(self.public_key.to_bytes()?);
-        buffer.extend(self.balance.to_bytes()?);
-        buffer.extend(self.validator.to_bytes()?);
-        Ok(buffer)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.public_key.serialized_length()
-            + self.balance.serialized_length()
-            + self.validator.serialized_length()
-    }
-}
-
-impl FromBytes for AccountConfig {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (public_key, remainder) = FromBytes::from_bytes(bytes)?;
-        let (balance, remainder) = FromBytes::from_bytes(remainder)?;
-        let (validator, remainder) = FromBytes::from_bytes(remainder)?;
-        let account_config = AccountConfig {
-            public_key,
-            balance,
-            validator,
-        };
-        Ok((account_config, remainder))
-    }
-}
-
-impl From<AccountConfig> for GenesisAccount {
-    fn from(account_config: AccountConfig) -> Self {
-        let genesis_validator = account_config.validator.map(Into::into);
-        GenesisAccount::account(
-            account_config.public_key,
-            account_config.balance,
-            genesis_validator,
-        )
-    }
-}
diff --git a/casper_types_ver_2_0/src/chainspec/accounts_config/delegator_config.rs b/casper_types_ver_2_0/src/chainspec/accounts_config/delegator_config.rs
deleted file mode 100644
index b91422b5..00000000
--- a/casper_types_ver_2_0/src/chainspec/accounts_config/delegator_config.rs
+++ /dev/null
@@ -1,133 +0,0 @@
-#[cfg(feature = "datasize")]
-use datasize::DataSize;
-#[cfg(any(feature = "testing", test))]
-use rand::{distributions::Standard, prelude::*};
-use serde::{Deserialize, Serialize};
-
-#[cfg(any(feature = "testing", test))]
-use crate::testing::TestRng;
-use crate::{
-    bytesrepr::{self, FromBytes, ToBytes},
-    GenesisAccount, Motes, PublicKey,
-};
-#[cfg(any(feature = "testing", test))]
-use crate::{SecretKey,
U512}; - -/// Configuration values related to a delegator. -#[derive(PartialEq, Ord, PartialOrd, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct DelegatorConfig { - /// Validator public key. - pub validator_public_key: PublicKey, - /// Delegator public key. - pub delegator_public_key: PublicKey, - /// Balance for this delegator in Motes. - pub balance: Motes, - /// Delegated amount in Motes. - pub delegated_amount: Motes, -} - -impl DelegatorConfig { - /// Creates a new DelegatorConfig. - pub fn new( - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - balance: Motes, - delegated_amount: Motes, - ) -> Self { - Self { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } - } - - #[cfg(any(feature = "testing", test))] - /// Generates a random instance using a `TestRng`. - pub fn random(rng: &mut TestRng) -> Self { - let validator_public_key = - PublicKey::from(&SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap()); - let delegator_public_key = - PublicKey::from(&SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap()); - let balance = Motes::new(U512::from(rng.gen::())); - let delegated_amount = Motes::new(U512::from(rng.gen::())); - - DelegatorConfig { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> DelegatorConfig { - let validator_secret_key = SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap(); - let delegator_secret_key = SecretKey::ed25519_from_bytes(rng.gen::<[u8; 32]>()).unwrap(); - - let validator_public_key = PublicKey::from(&validator_secret_key); - let delegator_public_key = PublicKey::from(&delegator_secret_key); - - let mut u512_array = [0u8; 64]; - rng.fill_bytes(u512_array.as_mut()); - let balance = Motes::new(U512::from(u512_array)); - - rng.fill_bytes(u512_array.as_mut()); - let delegated_amount = Motes::new(U512::from(u512_array)); - - DelegatorConfig::new( - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - ) - } -} - -impl ToBytes for DelegatorConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.validator_public_key.to_bytes()?); - buffer.extend(self.delegator_public_key.to_bytes()?); - buffer.extend(self.balance.to_bytes()?); - buffer.extend(self.delegated_amount.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.validator_public_key.serialized_length() - + self.delegator_public_key.serialized_length() - + self.balance.serialized_length() - + self.delegated_amount.serialized_length() - } -} - -impl FromBytes for DelegatorConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validator_public_key, remainder) = FromBytes::from_bytes(bytes)?; - let (delegator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (balance, remainder) = FromBytes::from_bytes(remainder)?; - let (delegated_amount, remainder) = FromBytes::from_bytes(remainder)?; - let delegator_config = DelegatorConfig { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - }; - Ok((delegator_config, remainder)) - } -} - -impl From for GenesisAccount { - fn from(delegator_config: DelegatorConfig) -> Self { - GenesisAccount::delegator( - delegator_config.validator_public_key, - delegator_config.delegator_public_key, - 
delegator_config.balance, - delegator_config.delegated_amount, - ) - } -} diff --git a/casper_types_ver_2_0/src/chainspec/accounts_config/genesis.rs b/casper_types_ver_2_0/src/chainspec/accounts_config/genesis.rs deleted file mode 100644 index 08d601ee..00000000 --- a/casper_types_ver_2_0/src/chainspec/accounts_config/genesis.rs +++ /dev/null @@ -1,497 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num_traits::Zero; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::AccountHash, - bytesrepr, - bytesrepr::{FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - system::auction::DelegationRate, - Motes, PublicKey, SecretKey, -}; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -#[repr(u8)] -enum GenesisAccountTag { - System = 0, - Account = 1, - Delegator = 2, - Administrator = 3, -} - -/// Represents details about genesis account's validator status. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct GenesisValidator { - /// Stake of a genesis validator. - bonded_amount: Motes, - /// Delegation rate in the range of 0-100. - delegation_rate: DelegationRate, -} - -impl ToBytes for GenesisValidator { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.bonded_amount.to_bytes()?); - buffer.extend(self.delegation_rate.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.bonded_amount.serialized_length() + self.delegation_rate.serialized_length() - } -} - -impl FromBytes for GenesisValidator { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonded_amount, remainder) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, remainder) = FromBytes::from_bytes(remainder)?; - let genesis_validator = GenesisValidator { - bonded_amount, - delegation_rate, - }; - Ok((genesis_validator, remainder)) - } -} - -impl GenesisValidator { - /// Creates new [`GenesisValidator`]. - pub fn new(bonded_amount: Motes, delegation_rate: DelegationRate) -> Self { - Self { - bonded_amount, - delegation_rate, - } - } - - /// Returns the bonded amount of a genesis validator. - pub fn bonded_amount(&self) -> Motes { - self.bonded_amount - } - - /// Returns the delegation rate of a genesis validator. - pub fn delegation_rate(&self) -> DelegationRate { - self.delegation_rate - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> GenesisValidator { - let bonded_amount = Motes::new(rng.gen()); - let delegation_rate = rng.gen(); - - GenesisValidator::new(bonded_amount, delegation_rate) - } -} - -/// Special account in the system that is useful only for some private chains. -#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct AdministratorAccount { - public_key: PublicKey, - balance: Motes, -} - -impl AdministratorAccount { - /// Creates new special account. - pub fn new(public_key: PublicKey, balance: Motes) -> Self { - Self { - public_key, - balance, - } - } - - /// Gets a reference to the administrator account's public key. 
- pub fn public_key(&self) -> &PublicKey { - &self.public_key - } -} - -impl ToBytes for AdministratorAccount { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let AdministratorAccount { - public_key, - balance, - } = self; - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(public_key.to_bytes()?); - buffer.extend(balance.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - let AdministratorAccount { - public_key, - balance, - } = self; - public_key.serialized_length() + balance.serialized_length() - } -} - -impl FromBytes for AdministratorAccount { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (public_key, remainder) = FromBytes::from_bytes(bytes)?; - let (balance, remainder) = FromBytes::from_bytes(remainder)?; - let administrator_account = AdministratorAccount { - public_key, - balance, - }; - Ok((administrator_account, remainder)) - } -} - -/// This enum represents possible states of a genesis account. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum GenesisAccount { - /// This variant is for internal use only - genesis process will create a virtual system - /// account and use it to call system contracts. - System, - /// Genesis account that will be created. - Account { - /// Public key of a genesis account. - public_key: PublicKey, - /// Starting balance of a genesis account. - balance: Motes, - /// If set, it will make this account a genesis validator. - validator: Option, - }, - /// The genesis delegator is a special account that will be created as a delegator. - /// It does not have any stake of its own, but will create a real account in the system - /// which will delegate to a genesis validator. - Delegator { - /// Validator's public key that has to refer to other instance of - /// [`GenesisAccount::Account`] with a `validator` field set. - validator_public_key: PublicKey, - /// Public key of the genesis account that will be created as part of this entry. - delegator_public_key: PublicKey, - /// Starting balance of the account. - balance: Motes, - /// Delegated amount for given `validator_public_key`. - delegated_amount: Motes, - }, - /// An administrative account in the genesis process. - /// - /// This variant makes sense for some private chains. - Administrator(AdministratorAccount), -} - -impl From for GenesisAccount { - fn from(v: AdministratorAccount) -> Self { - Self::Administrator(v) - } -} - -impl GenesisAccount { - /// Create a system account variant. - pub fn system() -> Self { - Self::System - } - - /// Create a standard account variant. - pub fn account( - public_key: PublicKey, - balance: Motes, - validator: Option, - ) -> Self { - Self::Account { - public_key, - balance, - validator, - } - } - - /// Create a delegator account variant. - pub fn delegator( - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - balance: Motes, - delegated_amount: Motes, - ) -> Self { - Self::Delegator { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } - } - - /// The public key (if any) associated with the account. - pub fn public_key(&self) -> PublicKey { - match self { - GenesisAccount::System => PublicKey::System, - GenesisAccount::Account { public_key, .. } => public_key.clone(), - GenesisAccount::Delegator { - delegator_public_key, - .. - } => delegator_public_key.clone(), - GenesisAccount::Administrator(AdministratorAccount { public_key, .. 
}) => { - public_key.clone() - } - } - } - - /// The account hash for the account. - pub fn account_hash(&self) -> AccountHash { - match self { - GenesisAccount::System => PublicKey::System.to_account_hash(), - GenesisAccount::Account { public_key, .. } => public_key.to_account_hash(), - GenesisAccount::Delegator { - delegator_public_key, - .. - } => delegator_public_key.to_account_hash(), - GenesisAccount::Administrator(AdministratorAccount { public_key, .. }) => { - public_key.to_account_hash() - } - } - } - - /// How many motes are to be deposited in the account's main purse. - pub fn balance(&self) -> Motes { - match self { - GenesisAccount::System => Motes::zero(), - GenesisAccount::Account { balance, .. } => *balance, - GenesisAccount::Delegator { balance, .. } => *balance, - GenesisAccount::Administrator(AdministratorAccount { balance, .. }) => *balance, - } - } - - /// How many motes are to be staked. - /// - /// Staked accounts are either validators with some amount of bonded stake or delgators with - /// some amount of delegated stake. - pub fn staked_amount(&self) -> Motes { - match self { - GenesisAccount::System { .. } - | GenesisAccount::Account { - validator: None, .. - } => Motes::zero(), - GenesisAccount::Account { - validator: Some(genesis_validator), - .. - } => genesis_validator.bonded_amount(), - GenesisAccount::Delegator { - delegated_amount, .. - } => *delegated_amount, - GenesisAccount::Administrator(AdministratorAccount { - public_key: _, - balance: _, - }) => { - // This is defaulted to zero because administrator accounts are filtered out before - // validator set is created at the genesis. - Motes::zero() - } - } - } - - /// What is the delegation rate of a validator. - pub fn delegation_rate(&self) -> DelegationRate { - match self { - GenesisAccount::Account { - validator: Some(genesis_validator), - .. - } => genesis_validator.delegation_rate(), - GenesisAccount::System - | GenesisAccount::Account { - validator: None, .. - } - | GenesisAccount::Delegator { .. } => { - // This value represents a delegation rate in invalid state that system is supposed - // to reject if used. - DelegationRate::max_value() - } - GenesisAccount::Administrator(AdministratorAccount { .. }) => { - DelegationRate::max_value() - } - } - } - - /// Is this a virtual system account. - pub fn is_system_account(&self) -> bool { - matches!(self, GenesisAccount::System { .. }) - } - - /// Is this a validator account. - pub fn is_validator(&self) -> bool { - match self { - GenesisAccount::Account { - validator: Some(_), .. - } => true, - GenesisAccount::System { .. } - | GenesisAccount::Account { - validator: None, .. - } - | GenesisAccount::Delegator { .. } - | GenesisAccount::Administrator(AdministratorAccount { .. }) => false, - } - } - - /// Details about the genesis validator. - pub fn validator(&self) -> Option<&GenesisValidator> { - match self { - GenesisAccount::Account { - validator: Some(genesis_validator), - .. - } => Some(genesis_validator), - _ => None, - } - } - - /// Is this a delegator account. - pub fn is_delegator(&self) -> bool { - matches!(self, GenesisAccount::Delegator { .. }) - } - - /// Details about the genesis delegator. 
- pub fn as_delegator(&self) -> Option<(&PublicKey, &PublicKey, &Motes, &Motes)> { - match self { - GenesisAccount::Delegator { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } => Some(( - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - )), - _ => None, - } - } - - /// Gets the administrator account variant. - pub fn as_administrator_account(&self) -> Option<&AdministratorAccount> { - if let Self::Administrator(v) = self { - Some(v) - } else { - None - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> GenesisAccount { - let mut bytes = [0u8; 32]; - rng.fill_bytes(&mut bytes[..]); - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - let public_key = PublicKey::from(&secret_key); - let balance = Motes::new(rng.gen()); - let validator = rng.gen(); - - GenesisAccount::account(public_key, balance, validator) - } -} - -impl ToBytes for GenesisAccount { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - GenesisAccount::System => { - buffer.push(GenesisAccountTag::System as u8); - } - GenesisAccount::Account { - public_key, - balance, - validator, - } => { - buffer.push(GenesisAccountTag::Account as u8); - buffer.extend(public_key.to_bytes()?); - buffer.extend(balance.value().to_bytes()?); - buffer.extend(validator.to_bytes()?); - } - GenesisAccount::Delegator { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } => { - buffer.push(GenesisAccountTag::Delegator as u8); - buffer.extend(validator_public_key.to_bytes()?); - buffer.extend(delegator_public_key.to_bytes()?); - buffer.extend(balance.value().to_bytes()?); - buffer.extend(delegated_amount.value().to_bytes()?); - } - GenesisAccount::Administrator(administrator_account) => { - buffer.push(GenesisAccountTag::Administrator as u8); - buffer.extend(administrator_account.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - match self { - GenesisAccount::System => TAG_LENGTH, - GenesisAccount::Account { - public_key, - balance, - validator, - } => { - public_key.serialized_length() - + balance.value().serialized_length() - + validator.serialized_length() - + TAG_LENGTH - } - GenesisAccount::Delegator { - validator_public_key, - delegator_public_key, - balance, - delegated_amount, - } => { - validator_public_key.serialized_length() - + delegator_public_key.serialized_length() - + balance.value().serialized_length() - + delegated_amount.value().serialized_length() - + TAG_LENGTH - } - GenesisAccount::Administrator(administrator_account) => { - administrator_account.serialized_length() + TAG_LENGTH - } - } - } -} - -impl FromBytes for GenesisAccount { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == GenesisAccountTag::System as u8 => { - let genesis_account = GenesisAccount::system(); - Ok((genesis_account, remainder)) - } - tag if tag == GenesisAccountTag::Account as u8 => { - let (public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (balance, remainder) = FromBytes::from_bytes(remainder)?; - let (validator, remainder) = FromBytes::from_bytes(remainder)?; - let genesis_account = GenesisAccount::account(public_key, balance, validator); - Ok((genesis_account, remainder)) - } - tag if tag == GenesisAccountTag::Delegator as u8 => { - let (validator_public_key, remainder) = 
FromBytes::from_bytes(remainder)?; - let (delegator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (balance, remainder) = FromBytes::from_bytes(remainder)?; - let (delegated_amount_value, remainder) = FromBytes::from_bytes(remainder)?; - let genesis_account = GenesisAccount::delegator( - validator_public_key, - delegator_public_key, - balance, - Motes::new(delegated_amount_value), - ); - Ok((genesis_account, remainder)) - } - tag if tag == GenesisAccountTag::Administrator as u8 => { - let (administrator_account, remainder) = - AdministratorAccount::from_bytes(remainder)?; - let genesis_account = GenesisAccount::Administrator(administrator_account); - Ok((genesis_account, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/accounts_config/validator_config.rs b/casper_types_ver_2_0/src/chainspec/accounts_config/validator_config.rs deleted file mode 100644 index 588faa49..00000000 --- a/casper_types_ver_2_0/src/chainspec/accounts_config/validator_config.rs +++ /dev/null @@ -1,102 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; -#[cfg(any(feature = "testing", test))] -use rand::{distributions::Standard, prelude::*}; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::DelegationRate, - GenesisValidator, Motes, -}; -#[cfg(any(feature = "testing", test))] -use crate::{testing::TestRng, U512}; - -/// Validator account configuration. -#[derive(PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Debug, Copy, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ValidatorConfig { - bonded_amount: Motes, - #[serde(default = "DelegationRate::zero")] - delegation_rate: DelegationRate, -} - -impl ValidatorConfig { - /// Creates a new `ValidatorConfig`. - pub fn new(bonded_amount: Motes, delegation_rate: DelegationRate) -> Self { - Self { - bonded_amount, - delegation_rate, - } - } - - /// Delegation rate. - pub fn delegation_rate(&self) -> DelegationRate { - self.delegation_rate - } - - /// Bonded amount. - pub fn bonded_amount(&self) -> Motes { - self.bonded_amount - } - - /// Returns a random `ValidatorConfig`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let bonded_amount = Motes::new(U512::from(rng.gen::())); - let delegation_rate = rng.gen(); - - ValidatorConfig { - bonded_amount, - delegation_rate, - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ValidatorConfig { - let mut u512_array = [0; 64]; - rng.fill_bytes(u512_array.as_mut()); - let bonded_amount = Motes::new(U512::from(u512_array)); - - let delegation_rate = rng.gen(); - - ValidatorConfig::new(bonded_amount, delegation_rate) - } -} - -impl ToBytes for ValidatorConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.bonded_amount.to_bytes()?); - buffer.extend(self.delegation_rate.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.bonded_amount.serialized_length() + self.delegation_rate.serialized_length() - } -} - -impl FromBytes for ValidatorConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonded_amount, remainder) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, remainder) = FromBytes::from_bytes(remainder)?; - let account_config = ValidatorConfig { - bonded_amount, - delegation_rate, - }; - Ok((account_config, remainder)) - } -} - -impl From for GenesisValidator { - fn from(account_config: ValidatorConfig) -> Self { - GenesisValidator::new( - account_config.bonded_amount(), - account_config.delegation_rate, - ) - } -} diff --git a/casper_types_ver_2_0/src/chainspec/activation_point.rs b/casper_types_ver_2_0/src/chainspec/activation_point.rs deleted file mode 100644 index 1410adea..00000000 --- a/casper_types_ver_2_0/src/chainspec/activation_point.rs +++ /dev/null @@ -1,121 +0,0 @@ -use std::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - EraId, Timestamp, -}; - -const ERA_ID_TAG: u8 = 0; -const GENESIS_TAG: u8 = 1; - -/// The first era to which the associated protocol version applies. -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(untagged)] -pub enum ActivationPoint { - /// Era id. - EraId(EraId), - /// Genesis timestamp. - Genesis(Timestamp), -} - -impl ActivationPoint { - /// Returns whether we should upgrade the node due to the next era being the upgrade activation - /// point. - pub fn should_upgrade(&self, era_being_deactivated: &EraId) -> bool { - match self { - ActivationPoint::EraId(era_id) => era_being_deactivated.successor() >= *era_id, - ActivationPoint::Genesis(_) => false, - } - } - - /// Returns the Era ID if `self` is of `EraId` variant, or else 0 if `Genesis`. - pub fn era_id(&self) -> EraId { - match self { - ActivationPoint::EraId(era_id) => *era_id, - ActivationPoint::Genesis(_) => EraId::from(0), - } - } - - /// Returns the timestamp if `self` is of `Genesis` variant, or else `None`. 
- pub fn genesis_timestamp(&self) -> Option { - match self { - ActivationPoint::EraId(_) => None, - ActivationPoint::Genesis(timestamp) => Some(*timestamp), - } - } - - /// Returns a random `ActivationPoint`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - ActivationPoint::EraId(EraId::random(rng)) - } else { - ActivationPoint::Genesis(Timestamp::random(rng)) - } - } -} - -impl Display for ActivationPoint { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - match self { - ActivationPoint::EraId(era_id) => write!(formatter, "activation point {}", era_id), - ActivationPoint::Genesis(timestamp) => { - write!(formatter, "activation point {}", timestamp) - } - } - } -} - -impl ToBytes for ActivationPoint { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - match self { - ActivationPoint::EraId(era_id) => { - let mut buffer = vec![ERA_ID_TAG]; - buffer.extend(era_id.to_bytes()?); - Ok(buffer) - } - ActivationPoint::Genesis(timestamp) => { - let mut buffer = vec![GENESIS_TAG]; - buffer.extend(timestamp.to_bytes()?); - Ok(buffer) - } - } - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - ActivationPoint::EraId(era_id) => era_id.serialized_length(), - ActivationPoint::Genesis(timestamp) => timestamp.serialized_length(), - } - } -} - -impl FromBytes for ActivationPoint { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - ERA_ID_TAG => { - let (era_id, remainder) = EraId::from_bytes(remainder)?; - Ok((ActivationPoint::EraId(era_id), remainder)) - } - GENESIS_TAG => { - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - Ok((ActivationPoint::Genesis(timestamp), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/chainspec_raw_bytes.rs b/casper_types_ver_2_0/src/chainspec/chainspec_raw_bytes.rs deleted file mode 100644 index 37c8347d..00000000 --- a/casper_types_ver_2_0/src/chainspec/chainspec_raw_bytes.rs +++ /dev/null @@ -1,196 +0,0 @@ -use core::fmt::{self, Debug, Display, Formatter}; - -use crate::bytesrepr::{self, Bytes, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -/// The raw bytes of the chainspec.toml, genesis accounts.toml, and global_state.toml files. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct ChainspecRawBytes { - /// Raw bytes of the current chainspec.toml file. - chainspec_bytes: Bytes, - /// Raw bytes of the current genesis accounts.toml file. - maybe_genesis_accounts_bytes: Option, - /// Raw bytes of the current global_state.toml file. - maybe_global_state_bytes: Option, -} - -impl ChainspecRawBytes { - /// Create an instance from parts. - pub fn new( - chainspec_bytes: Bytes, - maybe_genesis_accounts_bytes: Option, - maybe_global_state_bytes: Option, - ) -> Self { - ChainspecRawBytes { - chainspec_bytes, - maybe_genesis_accounts_bytes, - maybe_global_state_bytes, - } - } - - /// The bytes of the chainspec file. - pub fn chainspec_bytes(&self) -> &[u8] { - self.chainspec_bytes.as_slice() - } - - /// The bytes of global state account entries, when present for a protocol version. 
- pub fn maybe_genesis_accounts_bytes(&self) -> Option<&[u8]> { - match self.maybe_genesis_accounts_bytes.as_ref() { - Some(bytes) => Some(bytes.as_slice()), - None => None, - } - } - - /// The bytes of global state update entries, when present for a protocol version. - pub fn maybe_global_state_bytes(&self) -> Option<&[u8]> { - match self.maybe_global_state_bytes.as_ref() { - Some(bytes) => Some(bytes.as_slice()), - None => None, - } - } - - /// Returns a random `ChainspecRawBytes`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - use rand::Rng; - - let chainspec_bytes = Bytes::from(rng.random_vec(0..1024)); - let maybe_genesis_accounts_bytes = rng - .gen::() - .then(|| Bytes::from(rng.random_vec(0..1024))); - let maybe_global_state_bytes = rng - .gen::() - .then(|| Bytes::from(rng.random_vec(0..1024))); - ChainspecRawBytes { - chainspec_bytes, - maybe_genesis_accounts_bytes, - maybe_global_state_bytes, - } - } -} - -impl Debug for ChainspecRawBytes { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - let genesis_accounts_bytes_owned: Bytes; - let global_state_bytes_owned: Bytes; - f.debug_struct("ChainspecRawBytes") - .field( - "chainspec_bytes", - &self.chainspec_bytes[0..16].to_ascii_uppercase(), - ) - .field( - "maybe_genesis_accounts_bytes", - match self.maybe_genesis_accounts_bytes.as_ref() { - Some(genesis_accounts_bytes) => { - genesis_accounts_bytes_owned = - genesis_accounts_bytes[0..16].to_ascii_uppercase().into(); - &genesis_accounts_bytes_owned - } - None => &self.maybe_genesis_accounts_bytes, - }, - ) - .field( - "maybe_global_state_bytes", - match self.maybe_global_state_bytes.as_ref() { - Some(global_state_bytes) => { - global_state_bytes_owned = - global_state_bytes[0..16].to_ascii_uppercase().into(); - &global_state_bytes_owned - } - None => &self.maybe_global_state_bytes, - }, - ) - .finish() - } -} - -impl Display for ChainspecRawBytes { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "{}", - String::from_utf8_lossy(&self.chainspec_bytes) - )?; - if let Some(genesis_accounts_bytes) = &self.maybe_genesis_accounts_bytes { - write!( - formatter, - "{}", - String::from_utf8_lossy(genesis_accounts_bytes) - )?; - } - if let Some(global_state_bytes) = &self.maybe_global_state_bytes { - write!(formatter, "{}", String::from_utf8_lossy(global_state_bytes))?; - } - Ok(()) - } -} - -impl ToBytes for ChainspecRawBytes { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let ChainspecRawBytes { - chainspec_bytes, - maybe_genesis_accounts_bytes, - maybe_global_state_bytes, - } = self; - - chainspec_bytes.write_bytes(writer)?; - maybe_genesis_accounts_bytes.write_bytes(writer)?; - maybe_global_state_bytes.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - let ChainspecRawBytes { - chainspec_bytes, - maybe_genesis_accounts_bytes, - maybe_global_state_bytes, - } = self; - chainspec_bytes.serialized_length() - + maybe_genesis_accounts_bytes.serialized_length() - + maybe_global_state_bytes.serialized_length() - } -} - -impl FromBytes for ChainspecRawBytes { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (chainspec_bytes, remainder) = FromBytes::from_bytes(bytes)?; - let (maybe_genesis_accounts_bytes, remainder) = FromBytes::from_bytes(remainder)?; - let 
(maybe_global_state_bytes, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - ChainspecRawBytes { - chainspec_bytes, - maybe_genesis_accounts_bytes, - maybe_global_state_bytes, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = ChainspecRawBytes::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/core_config.rs b/casper_types_ver_2_0/src/chainspec/core_config.rs deleted file mode 100644 index 8f5b5821..00000000 --- a/casper_types_ver_2_0/src/chainspec/core_config.rs +++ /dev/null @@ -1,538 +0,0 @@ -use alloc::collections::BTreeSet; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::rational::Ratio; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; - -use serde::{ - de::{Deserializer, Error as DeError}, - Deserialize, Serialize, Serializer, -}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - ProtocolVersion, PublicKey, TimeDiff, -}; - -use super::{fee_handling::FeeHandling, refund_handling::RefundHandling}; - -/// Configuration values associated with the core protocol. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -// Disallow unknown fields to ensure config files and command-line overrides contain valid keys. -#[serde(deny_unknown_fields)] -pub struct CoreConfig { - /// Duration of an era. - pub era_duration: TimeDiff, - - /// Minimum era height. - pub minimum_era_height: u64, - - /// Minimum block time. - pub minimum_block_time: TimeDiff, - - /// Validator slots. - pub validator_slots: u32, - - /// Finality threshold fraction. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub finality_threshold_fraction: Ratio, - - /// Protocol version from which nodes are required to hold strict finality signatures. - pub start_protocol_version_with_strict_finality_signatures_required: ProtocolVersion, - - /// Which finality is required for legacy blocks. - /// Used to determine finality sufficiency for new joiners syncing blocks created - /// in a protocol version before - /// `start_protocol_version_with_strict_finality_signatures_required`. - pub legacy_required_finality: LegacyRequiredFinality, - - /// Number of eras before an auction actually defines the set of validators. - /// If you bond with a sufficient bid in era N, you will be a validator in era N + - /// auction_delay + 1 - pub auction_delay: u64, - - /// The period after genesis during which a genesis validator's bid is locked. - pub locked_funds_period: TimeDiff, - - /// The period in which genesis validator's bid is released over time after it's unlocked. - pub vesting_schedule_period: TimeDiff, - - /// The delay in number of eras for paying out the unbonding amount. - pub unbonding_delay: u64, - - /// Round seigniorage rate represented as a fractional number. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub round_seigniorage_rate: Ratio, - - /// Maximum number of associated keys for a single account. - pub max_associated_keys: u32, - - /// Maximum height of contract runtime call stack. - pub max_runtime_call_stack_height: u32, - - /// The minimum bound of motes that can be delegated to a validator. 
- pub minimum_delegation_amount: u64, - - /// Global state prune batch size (0 means the feature is off in the current protocol version). - pub prune_batch_size: u64, - - /// Enables strict arguments checking when calling a contract. - pub strict_argument_checking: bool, - - /// How many peers to simultaneously ask when sync leaping. - pub simultaneous_peer_requests: u8, - - /// Which consensus protocol to use. - pub consensus_protocol: ConsensusProtocolName, - - /// The maximum amount of delegators per validator. - /// if the value is 0, there is no maximum capacity. - pub max_delegators_per_validator: u32, - - /// The split in finality signature rewards between block producer and participating signers. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub finders_fee: Ratio, - - /// The proportion of baseline rewards going to reward finality signatures specifically. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub finality_signature_proportion: Ratio, - - /// Lookback interval indicating which past block we are looking at to reward. - pub signature_rewards_max_delay: u64, - /// Auction entrypoints such as "add_bid" or "delegate" are disabled if this flag is set to - /// `false`. Setting up this option makes sense only for private chains where validator set - /// rotation is unnecessary. - pub allow_auction_bids: bool, - /// Allows unrestricted transfers between users. - pub allow_unrestricted_transfers: bool, - /// If set to false then consensus doesn't compute rewards and always uses 0. - pub compute_rewards: bool, - /// Administrative accounts are a valid option for a private chain only. - #[serde(default, skip_serializing_if = "BTreeSet::is_empty")] - pub administrators: BTreeSet, - /// Refund handling. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub refund_handling: RefundHandling, - /// Fee handling. - pub fee_handling: FeeHandling, -} - -impl CoreConfig { - /// The number of eras that have already started and whose validators are still bonded. - pub fn recent_era_count(&self) -> u64 { - // Safe to use naked `-` operation assuming `CoreConfig::is_valid()` has been checked. - self.unbonding_delay - self.auction_delay - } - - /// The proportion of the total rewards going to block production. - pub fn production_rewards_proportion(&self) -> Ratio { - Ratio::new(1, 1) - self.finality_signature_proportion - } - - /// The finder's fee, *i.e.* the proportion of the total rewards going to the validator - /// collecting the finality signatures which is the validator producing the block. - pub fn collection_rewards_proportion(&self) -> Ratio { - self.finders_fee * self.finality_signature_proportion - } - - /// The proportion of the total rewards going to finality signatures collection. - pub fn contribution_rewards_proportion(&self) -> Ratio { - (Ratio::new(1, 1) - self.finders_fee) * self.finality_signature_proportion - } -} - -#[cfg(any(feature = "testing", test))] -impl CoreConfig { - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - let era_duration = TimeDiff::from_seconds(rng.gen_range(600..604_800)); - let minimum_era_height = rng.gen_range(5..100); - let minimum_block_time = TimeDiff::from_seconds(rng.gen_range(1..60)); - let validator_slots = rng.gen_range(1..10_000); - let finality_threshold_fraction = Ratio::new(rng.gen_range(1..100), 100); - let start_protocol_version_with_strict_finality_signatures_required = - ProtocolVersion::from_parts(1, rng.gen_range(5..10), rng.gen_range(0..100)); - let legacy_required_finality = rng.gen(); - let auction_delay = rng.gen_range(1..5); - let locked_funds_period = TimeDiff::from_seconds(rng.gen_range(600..604_800)); - let vesting_schedule_period = TimeDiff::from_seconds(rng.gen_range(600..604_800)); - let unbonding_delay = rng.gen_range((auction_delay + 1)..1_000_000_000); - let round_seigniorage_rate = Ratio::new( - rng.gen_range(1..1_000_000_000), - rng.gen_range(1..1_000_000_000), - ); - let max_associated_keys = rng.gen(); - let max_runtime_call_stack_height = rng.gen(); - let minimum_delegation_amount = rng.gen::() as u64; - let prune_batch_size = rng.gen_range(0..100); - let strict_argument_checking = rng.gen(); - let simultaneous_peer_requests = rng.gen_range(3..100); - let consensus_protocol = rng.gen(); - let finders_fee = Ratio::new(rng.gen_range(1..100), 100); - let finality_signature_proportion = Ratio::new(rng.gen_range(1..100), 100); - let signature_rewards_max_delay = rng.gen_range(1..10); - let allow_auction_bids = rng.gen(); - let allow_unrestricted_transfers = rng.gen(); - let compute_rewards = rng.gen(); - let administrators = (0..rng.gen_range(0..=10u32)) - .map(|_| PublicKey::random(rng)) - .collect(); - let refund_handling = { - let numer = rng.gen_range(0..=100); - let refund_ratio = Ratio::new(numer, 100); - RefundHandling::Refund { refund_ratio } - }; - - let fee_handling = if rng.gen() { - FeeHandling::PayToProposer - } else { - FeeHandling::Accumulate - }; - - CoreConfig { - era_duration, - minimum_era_height, - minimum_block_time, - validator_slots, - finality_threshold_fraction, - start_protocol_version_with_strict_finality_signatures_required, - legacy_required_finality, - auction_delay, - locked_funds_period, - vesting_schedule_period, - unbonding_delay, - round_seigniorage_rate, - max_associated_keys, - max_runtime_call_stack_height, - minimum_delegation_amount, - prune_batch_size, - strict_argument_checking, - simultaneous_peer_requests, - consensus_protocol, - max_delegators_per_validator: 0, - finders_fee, - finality_signature_proportion, - signature_rewards_max_delay, - allow_auction_bids, - administrators, - allow_unrestricted_transfers, - compute_rewards, - refund_handling, - fee_handling, - } - } -} - -impl ToBytes for CoreConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.era_duration.to_bytes()?); - buffer.extend(self.minimum_era_height.to_bytes()?); - buffer.extend(self.minimum_block_time.to_bytes()?); - buffer.extend(self.validator_slots.to_bytes()?); - buffer.extend(self.finality_threshold_fraction.to_bytes()?); - buffer.extend( - self.start_protocol_version_with_strict_finality_signatures_required - .to_bytes()?, - ); - buffer.extend(self.legacy_required_finality.to_bytes()?); - buffer.extend(self.auction_delay.to_bytes()?); - buffer.extend(self.locked_funds_period.to_bytes()?); - buffer.extend(self.vesting_schedule_period.to_bytes()?); - buffer.extend(self.unbonding_delay.to_bytes()?); - 
buffer.extend(self.round_seigniorage_rate.to_bytes()?); - buffer.extend(self.max_associated_keys.to_bytes()?); - buffer.extend(self.max_runtime_call_stack_height.to_bytes()?); - buffer.extend(self.minimum_delegation_amount.to_bytes()?); - buffer.extend(self.prune_batch_size.to_bytes()?); - buffer.extend(self.strict_argument_checking.to_bytes()?); - buffer.extend(self.simultaneous_peer_requests.to_bytes()?); - buffer.extend(self.consensus_protocol.to_bytes()?); - buffer.extend(self.max_delegators_per_validator.to_bytes()?); - buffer.extend(self.finders_fee.to_bytes()?); - buffer.extend(self.finality_signature_proportion.to_bytes()?); - buffer.extend(self.signature_rewards_max_delay.to_bytes()?); - buffer.extend(self.allow_auction_bids.to_bytes()?); - buffer.extend(self.allow_unrestricted_transfers.to_bytes()?); - buffer.extend(self.compute_rewards.to_bytes()?); - buffer.extend(self.administrators.to_bytes()?); - buffer.extend(self.refund_handling.to_bytes()?); - buffer.extend(self.fee_handling.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.era_duration.serialized_length() - + self.minimum_era_height.serialized_length() - + self.minimum_block_time.serialized_length() - + self.validator_slots.serialized_length() - + self.finality_threshold_fraction.serialized_length() - + self - .start_protocol_version_with_strict_finality_signatures_required - .serialized_length() - + self.legacy_required_finality.serialized_length() - + self.auction_delay.serialized_length() - + self.locked_funds_period.serialized_length() - + self.vesting_schedule_period.serialized_length() - + self.unbonding_delay.serialized_length() - + self.round_seigniorage_rate.serialized_length() - + self.max_associated_keys.serialized_length() - + self.max_runtime_call_stack_height.serialized_length() - + self.minimum_delegation_amount.serialized_length() - + self.prune_batch_size.serialized_length() - + self.strict_argument_checking.serialized_length() - + self.simultaneous_peer_requests.serialized_length() - + self.consensus_protocol.serialized_length() - + self.max_delegators_per_validator.serialized_length() - + self.finders_fee.serialized_length() - + self.finality_signature_proportion.serialized_length() - + self.signature_rewards_max_delay.serialized_length() - + self.allow_auction_bids.serialized_length() - + self.allow_unrestricted_transfers.serialized_length() - + self.compute_rewards.serialized_length() - + self.administrators.serialized_length() - + self.refund_handling.serialized_length() - + self.fee_handling.serialized_length() - } -} - -impl FromBytes for CoreConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (era_duration, remainder) = TimeDiff::from_bytes(bytes)?; - let (minimum_era_height, remainder) = u64::from_bytes(remainder)?; - let (minimum_block_time, remainder) = TimeDiff::from_bytes(remainder)?; - let (validator_slots, remainder) = u32::from_bytes(remainder)?; - let (finality_threshold_fraction, remainder) = Ratio::::from_bytes(remainder)?; - let (start_protocol_version_with_strict_finality_signatures_required, remainder) = - ProtocolVersion::from_bytes(remainder)?; - let (legacy_required_finality, remainder) = LegacyRequiredFinality::from_bytes(remainder)?; - let (auction_delay, remainder) = u64::from_bytes(remainder)?; - let (locked_funds_period, remainder) = TimeDiff::from_bytes(remainder)?; - let (vesting_schedule_period, remainder) = TimeDiff::from_bytes(remainder)?; - let (unbonding_delay, remainder) = 
u64::from_bytes(remainder)?; - let (round_seigniorage_rate, remainder) = Ratio::::from_bytes(remainder)?; - let (max_associated_keys, remainder) = u32::from_bytes(remainder)?; - let (max_runtime_call_stack_height, remainder) = u32::from_bytes(remainder)?; - let (minimum_delegation_amount, remainder) = u64::from_bytes(remainder)?; - let (prune_batch_size, remainder) = u64::from_bytes(remainder)?; - let (strict_argument_checking, remainder) = bool::from_bytes(remainder)?; - let (simultaneous_peer_requests, remainder) = u8::from_bytes(remainder)?; - let (consensus_protocol, remainder) = ConsensusProtocolName::from_bytes(remainder)?; - let (max_delegators_per_validator, remainder) = FromBytes::from_bytes(remainder)?; - let (finders_fee, remainder) = Ratio::from_bytes(remainder)?; - let (finality_signature_proportion, remainder) = Ratio::from_bytes(remainder)?; - let (signature_rewards_max_delay, remainder) = u64::from_bytes(remainder)?; - let (allow_auction_bids, remainder) = FromBytes::from_bytes(remainder)?; - let (allow_unrestricted_transfers, remainder) = FromBytes::from_bytes(remainder)?; - let (compute_rewards, remainder) = bool::from_bytes(remainder)?; - let (administrative_accounts, remainder) = FromBytes::from_bytes(remainder)?; - let (refund_handling, remainder) = FromBytes::from_bytes(remainder)?; - let (fee_handling, remainder) = FromBytes::from_bytes(remainder)?; - let config = CoreConfig { - era_duration, - minimum_era_height, - minimum_block_time, - validator_slots, - finality_threshold_fraction, - start_protocol_version_with_strict_finality_signatures_required, - legacy_required_finality, - auction_delay, - locked_funds_period, - vesting_schedule_period, - unbonding_delay, - round_seigniorage_rate, - max_associated_keys, - max_runtime_call_stack_height, - minimum_delegation_amount, - prune_batch_size, - strict_argument_checking, - simultaneous_peer_requests, - consensus_protocol, - max_delegators_per_validator, - finders_fee, - finality_signature_proportion, - signature_rewards_max_delay, - allow_auction_bids, - allow_unrestricted_transfers, - compute_rewards, - administrators: administrative_accounts, - refund_handling, - fee_handling, - }; - Ok((config, remainder)) - } -} - -/// Consensus protocol name. -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum ConsensusProtocolName { - /// Highway. - Highway, - /// Zug. 
- Zug, -} - -impl Serialize for ConsensusProtocolName { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self { - ConsensusProtocolName::Highway => "Highway", - ConsensusProtocolName::Zug => "Zug", - } - .serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for ConsensusProtocolName { - fn deserialize>(deserializer: D) -> Result { - match String::deserialize(deserializer)?.to_lowercase().as_str() { - "highway" => Ok(ConsensusProtocolName::Highway), - "zug" => Ok(ConsensusProtocolName::Zug), - _ => Err(DeError::custom("unknown consensus protocol name")), - } - } -} - -const CONSENSUS_HIGHWAY_TAG: u8 = 0; -const CONSENSUS_ZUG_TAG: u8 = 1; - -impl ToBytes for ConsensusProtocolName { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let tag = match self { - ConsensusProtocolName::Highway => CONSENSUS_HIGHWAY_TAG, - ConsensusProtocolName::Zug => CONSENSUS_ZUG_TAG, - }; - Ok(vec![tag]) - } - - fn serialized_length(&self) -> usize { - 1 - } -} - -impl FromBytes for ConsensusProtocolName { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - let name = match tag { - CONSENSUS_HIGHWAY_TAG => ConsensusProtocolName::Highway, - CONSENSUS_ZUG_TAG => ConsensusProtocolName::Zug, - _ => return Err(bytesrepr::Error::Formatting), - }; - Ok((name, remainder)) - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ConsensusProtocolName { - if rng.gen() { - ConsensusProtocolName::Highway - } else { - ConsensusProtocolName::Zug - } - } -} - -/// Which finality a legacy block needs during a fast sync. -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum LegacyRequiredFinality { - /// Strict finality: more than 2/3rd of validators. - Strict, - /// Weak finality: more than 1/3rd of validators. - Weak, - /// Finality always valid. 
- Any, -} - -impl Serialize for LegacyRequiredFinality { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self { - LegacyRequiredFinality::Strict => "Strict", - LegacyRequiredFinality::Weak => "Weak", - LegacyRequiredFinality::Any => "Any", - } - .serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for LegacyRequiredFinality { - fn deserialize>(deserializer: D) -> Result { - match String::deserialize(deserializer)?.to_lowercase().as_str() { - "strict" => Ok(LegacyRequiredFinality::Strict), - "weak" => Ok(LegacyRequiredFinality::Weak), - "any" => Ok(LegacyRequiredFinality::Any), - _ => Err(DeError::custom("unknown legacy required finality")), - } - } -} - -const LEGACY_REQUIRED_FINALITY_STRICT_TAG: u8 = 0; -const LEGACY_REQUIRED_FINALITY_WEAK_TAG: u8 = 1; -const LEGACY_REQUIRED_FINALITY_ANY_TAG: u8 = 2; - -impl ToBytes for LegacyRequiredFinality { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let tag = match self { - LegacyRequiredFinality::Strict => LEGACY_REQUIRED_FINALITY_STRICT_TAG, - LegacyRequiredFinality::Weak => LEGACY_REQUIRED_FINALITY_WEAK_TAG, - LegacyRequiredFinality::Any => LEGACY_REQUIRED_FINALITY_ANY_TAG, - }; - Ok(vec![tag]) - } - - fn serialized_length(&self) -> usize { - 1 - } -} - -impl FromBytes for LegacyRequiredFinality { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - LEGACY_REQUIRED_FINALITY_STRICT_TAG => Ok((LegacyRequiredFinality::Strict, remainder)), - LEGACY_REQUIRED_FINALITY_WEAK_TAG => Ok((LegacyRequiredFinality::Weak, remainder)), - LEGACY_REQUIRED_FINALITY_ANY_TAG => Ok((LegacyRequiredFinality::Any, remainder)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> LegacyRequiredFinality { - match rng.gen_range(0..3) { - 0 => LegacyRequiredFinality::Strict, - 1 => LegacyRequiredFinality::Weak, - 2 => LegacyRequiredFinality::Any, - _not_in_range => unreachable!(), - } - } -} - -#[cfg(test)] -mod tests { - use rand::SeedableRng; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let config = CoreConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/fee_handling.rs b/casper_types_ver_2_0/src/chainspec/fee_handling.rs deleted file mode 100644 index abd17017..00000000 --- a/casper_types_ver_2_0/src/chainspec/fee_handling.rs +++ /dev/null @@ -1,76 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -const FEE_HANDLING_PROPOSER_TAG: u8 = 0; -const FEE_HANDLING_ACCUMULATE_TAG: u8 = 1; -const FEE_HANDLING_BURN_TAG: u8 = 2; - -/// Defines how fees are handled in the system. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum FeeHandling { - /// Transaction fees are paid to the block proposer. - /// - /// This is the default option for public chains. - PayToProposer, - /// Transaction fees are accumulated in a special purse and then distributed during end of era - /// processing evenly among all administrator accounts. - /// - /// This setting is applicable for some private chains (but not all). - Accumulate, - /// Burn the fees. 
- Burn, -} - -impl ToBytes for FeeHandling { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - match self { - FeeHandling::PayToProposer => Ok(vec![FEE_HANDLING_PROPOSER_TAG]), - FeeHandling::Accumulate => Ok(vec![FEE_HANDLING_ACCUMULATE_TAG]), - FeeHandling::Burn => Ok(vec![FEE_HANDLING_BURN_TAG]), - } - } - - fn serialized_length(&self) -> usize { - 1 - } -} - -impl FromBytes for FeeHandling { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, rem) = u8::from_bytes(bytes)?; - match tag { - FEE_HANDLING_PROPOSER_TAG => Ok((FeeHandling::PayToProposer, rem)), - FEE_HANDLING_ACCUMULATE_TAG => Ok((FeeHandling::Accumulate, rem)), - FEE_HANDLING_BURN_TAG => Ok((FeeHandling::Burn, rem)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip_for_refund() { - let fee_config = FeeHandling::PayToProposer; - bytesrepr::test_serialization_roundtrip(&fee_config); - } - - #[test] - fn bytesrepr_roundtrip_for_accumulate() { - let fee_config = FeeHandling::Accumulate; - bytesrepr::test_serialization_roundtrip(&fee_config); - } - - #[test] - fn bytesrepr_roundtrip_for_burn() { - let fee_config = FeeHandling::Burn; - bytesrepr::test_serialization_roundtrip(&fee_config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/global_state_update.rs b/casper_types_ver_2_0/src/chainspec/global_state_update.rs deleted file mode 100644 index 68de870c..00000000 --- a/casper_types_ver_2_0/src/chainspec/global_state_update.rs +++ /dev/null @@ -1,181 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; -use std::{collections::BTreeMap, convert::TryFrom}; -use thiserror::Error; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes}, - AsymmetricType, Key, PublicKey, U512, -}; - -#[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct GlobalStateUpdateEntry { - key: String, - value: String, -} - -#[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct GlobalStateUpdateValidatorInfo { - public_key: String, - weight: String, -} - -/// Type storing global state update entries. -#[derive(PartialEq, Eq, Serialize, Deserialize, Debug, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct GlobalStateUpdateConfig { - validators: Option>, - entries: Vec, -} - -/// Type storing the information about modifications to be applied to the global state. -/// -/// It stores the serialized `StoredValue`s corresponding to keys to be modified, and for the case -/// where the validator set is being modified in any way, the full set of post-upgrade validators. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct GlobalStateUpdate { - /// Some with all validators (including pre-existent), if any change to the set is made. - pub validators: Option>, - /// Global state key value pairs, which will be directly upserted into global state against - /// the root hash of the final block of the era before the upgrade. - pub entries: BTreeMap, -} - -impl GlobalStateUpdate { - /// Returns a random `GlobalStateUpdate`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let mut validators = BTreeMap::new(); - if rng.gen() { - let count = rng.gen_range(5..10); - for _ in 0..count { - validators.insert(PublicKey::random(rng), rng.gen::()); - } - } - - let count = rng.gen_range(0..10); - let mut entries = BTreeMap::new(); - for _ in 0..count { - entries.insert(rng.gen(), rng.gen()); - } - - Self { - validators: Some(validators), - entries, - } - } -} - -impl ToBytes for GlobalStateUpdate { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.validators.write_bytes(writer)?; - self.entries.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.validators.serialized_length() + self.entries.serialized_length() - } -} - -impl FromBytes for GlobalStateUpdate { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validators, remainder) = Option::>::from_bytes(bytes)?; - let (entries, remainder) = BTreeMap::::from_bytes(remainder)?; - let global_state_update = GlobalStateUpdate { - entries, - validators, - }; - Ok((global_state_update, remainder)) - } -} - -/// Error loading global state update file. -#[derive(Debug, Error)] -pub enum GlobalStateUpdateError { - /// Error while decoding a key from a prefix formatted string. - #[error("decoding key from formatted string error: {0}")] - DecodingKeyFromStr(String), - /// Error while decoding a key from a hex formatted string. - #[error("decoding key from hex string error: {0}")] - DecodingKeyFromHex(String), - /// Error while decoding a public key weight from formatted string. - #[error("decoding weight from decimal string error: {0}")] - DecodingWeightFromStr(String), - /// Error while decoding a serialized value from a base64 encoded string. 
- #[error("decoding from base64 error: {0}")] - DecodingFromBase64(#[from] base64::DecodeError), -} - -impl TryFrom for GlobalStateUpdate { - type Error = GlobalStateUpdateError; - - fn try_from(config: GlobalStateUpdateConfig) -> Result { - let mut validators: Option> = None; - if let Some(config_validators) = config.validators { - let mut new_validators = BTreeMap::new(); - for (index, validator) in config_validators.into_iter().enumerate() { - let public_key = PublicKey::from_hex(&validator.public_key).map_err(|error| { - GlobalStateUpdateError::DecodingKeyFromHex(format!( - "failed to decode validator public key {}: {:?}", - index, error - )) - })?; - let weight = U512::from_dec_str(&validator.weight).map_err(|error| { - GlobalStateUpdateError::DecodingWeightFromStr(format!( - "failed to decode validator weight {}: {}", - index, error - )) - })?; - let _ = new_validators.insert(public_key, weight); - } - validators = Some(new_validators); - } - - let mut entries = BTreeMap::new(); - for (index, entry) in config.entries.into_iter().enumerate() { - let key = Key::from_formatted_str(&entry.key).map_err(|error| { - GlobalStateUpdateError::DecodingKeyFromStr(format!( - "failed to decode entry key {}: {}", - index, error - )) - })?; - let value = base64::decode(&entry.value)?.into(); - let _ = entries.insert(key, value); - } - - Ok(GlobalStateUpdate { - validators, - entries, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use rand::SeedableRng; - - #[test] - fn global_state_update_bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let update = GlobalStateUpdate::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&update); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/highway_config.rs b/casper_types_ver_2_0/src/chainspec/highway_config.rs deleted file mode 100644 index def377c2..00000000 --- a/casper_types_ver_2_0/src/chainspec/highway_config.rs +++ /dev/null @@ -1,111 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::rational::Ratio; - -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - TimeDiff, -}; - -/// Configuration values relevant to Highway consensus. -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -// Disallow unknown fields to ensure config files and command-line overrides contain valid keys. -#[serde(deny_unknown_fields)] -pub struct HighwayConfig { - /// The upper limit for Highway round lengths. - pub maximum_round_length: TimeDiff, - /// The factor by which rewards for a round are multiplied if the greatest summit has ≤50% - /// quorum, i.e. no finality. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub reduced_reward_multiplier: Ratio, -} - -impl HighwayConfig { - /// Checks whether the values set in the config make sense and returns `false` if they don't. - pub fn is_valid(&self) -> Result<(), String> { - if self.reduced_reward_multiplier > Ratio::new(1, 1) { - Err("reduced reward multiplier is not in the range [0, 1]".to_string()) - } else { - Ok(()) - } - } - - /// Returns a random `HighwayConfig`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let maximum_round_length = TimeDiff::from_seconds(rng.gen_range(60..600)); - let reduced_reward_multiplier = Ratio::new(rng.gen_range(0..10), 10); - - HighwayConfig { - maximum_round_length, - reduced_reward_multiplier, - } - } -} - -impl ToBytes for HighwayConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.maximum_round_length.to_bytes()?); - buffer.extend(self.reduced_reward_multiplier.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.maximum_round_length.serialized_length() - + self.reduced_reward_multiplier.serialized_length() - } -} - -impl FromBytes for HighwayConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (maximum_round_length, remainder) = TimeDiff::from_bytes(bytes)?; - let (reduced_reward_multiplier, remainder) = Ratio::::from_bytes(remainder)?; - let config = HighwayConfig { - maximum_round_length, - reduced_reward_multiplier, - }; - Ok((config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use rand::SeedableRng; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let config = HighwayConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } - - #[test] - fn should_validate_for_reduced_reward_multiplier() { - let mut rng = TestRng::from_entropy(); - let mut highway_config = HighwayConfig::random(&mut rng); - - // Should be valid for 0 <= RRM <= 1. - highway_config.reduced_reward_multiplier = Ratio::new(0, 1); - assert!(highway_config.is_valid().is_ok()); - highway_config.reduced_reward_multiplier = Ratio::new(1, 1); - assert!(highway_config.is_valid().is_ok()); - highway_config.reduced_reward_multiplier = Ratio::new(u64::MAX, u64::MAX); - assert!(highway_config.is_valid().is_ok()); - - highway_config.reduced_reward_multiplier = Ratio::new(u64::MAX, u64::MAX - 1); - assert!( - highway_config.is_valid().is_err(), - "Should be invalid for RRM > 1." - ); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/network_config.rs b/casper_types_ver_2_0/src/chainspec/network_config.rs deleted file mode 100644 index 42090c22..00000000 --- a/casper_types_ver_2_0/src/chainspec/network_config.rs +++ /dev/null @@ -1,86 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; - -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::Serialize; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -use super::AccountsConfig; - -/// Configuration values associated with the network. -#[derive(Clone, PartialEq, Eq, Serialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct NetworkConfig { - /// The network name. - pub name: String, - /// The maximum size of an accepted network message, in bytes. - pub maximum_net_message_size: u32, - /// Validator accounts specified in the chainspec. - // Note: `accounts_config` must be the last field on this struct due to issues in the TOML - // crate - see . - pub accounts_config: AccountsConfig, -} - -impl NetworkConfig { - /// Returns a random `NetworkConfig`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let name = rng.gen::().to_string(); - let maximum_net_message_size = 4 + rng.gen_range(0..4); - let accounts_config = AccountsConfig::random(rng); - - NetworkConfig { - name, - maximum_net_message_size, - accounts_config, - } - } -} - -impl ToBytes for NetworkConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.name.to_bytes()?); - buffer.extend(self.accounts_config.to_bytes()?); - buffer.extend(self.maximum_net_message_size.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.name.serialized_length() - + self.accounts_config.serialized_length() - + self.maximum_net_message_size.serialized_length() - } -} - -impl FromBytes for NetworkConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, remainder) = String::from_bytes(bytes)?; - let (accounts_config, remainder) = FromBytes::from_bytes(remainder)?; - let (maximum_net_message_size, remainder) = FromBytes::from_bytes(remainder)?; - let config = NetworkConfig { - name, - maximum_net_message_size, - accounts_config, - }; - Ok((config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use rand::SeedableRng; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let config = NetworkConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/next_upgrade.rs b/casper_types_ver_2_0/src/chainspec/next_upgrade.rs deleted file mode 100644 index 897755f9..00000000 --- a/casper_types_ver_2_0/src/chainspec/next_upgrade.rs +++ /dev/null @@ -1,115 +0,0 @@ -use std::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - ActivationPoint, ProtocolConfig, ProtocolVersion, -}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -/// Information about the next protocol upgrade. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -pub struct NextUpgrade { - activation_point: ActivationPoint, - protocol_version: ProtocolVersion, -} - -impl NextUpgrade { - /// Creates a new `NextUpgrade`. - pub fn new(activation_point: ActivationPoint, protocol_version: ProtocolVersion) -> Self { - NextUpgrade { - activation_point, - protocol_version, - } - } - - /// Returns the activation point of the next upgrade. 
- pub fn activation_point(&self) -> ActivationPoint { - self.activation_point - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - activation_point: ActivationPoint::random(rng), - protocol_version: ProtocolVersion::from_parts(rng.gen(), rng.gen(), rng.gen()), - } - } -} - -impl From for NextUpgrade { - fn from(protocol_config: ProtocolConfig) -> Self { - NextUpgrade { - activation_point: protocol_config.activation_point, - protocol_version: protocol_config.version, - } - } -} - -impl Display for NextUpgrade { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "next upgrade to {} at start of era {}", - self.protocol_version, - self.activation_point.era_id() - ) - } -} - -impl ToBytes for NextUpgrade { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.activation_point.write_bytes(writer)?; - self.protocol_version.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.activation_point.serialized_length() + self.protocol_version.serialized_length() - } -} - -impl FromBytes for NextUpgrade { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (activation_point, remainder) = ActivationPoint::from_bytes(bytes)?; - let (protocol_version, remainder) = ProtocolVersion::from_bytes(remainder)?; - Ok(( - NextUpgrade { - activation_point, - protocol_version, - }, - remainder, - )) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = NextUpgrade::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/protocol_config.rs b/casper_types_ver_2_0/src/chainspec/protocol_config.rs deleted file mode 100644 index f693578f..00000000 --- a/casper_types_ver_2_0/src/chainspec/protocol_config.rs +++ /dev/null @@ -1,125 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; -use std::{collections::BTreeMap, str::FromStr}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Key, ProtocolVersion, StoredValue, -}; - -use crate::{ActivationPoint, GlobalStateUpdate}; - -/// Configuration values associated with the protocol. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ProtocolConfig { - /// Protocol version. - #[cfg_attr(feature = "datasize", data_size(skip))] - pub version: ProtocolVersion, - /// Whether we need to clear latest blocks back to the switch block just before the activation - /// point or not. - pub hard_reset: bool, - /// This protocol config applies starting at the era specified in the activation point. - pub activation_point: ActivationPoint, - /// Any arbitrary updates we might want to make to the global state at the start of the era - /// specified in the activation point. - pub global_state_update: Option, -} - -impl ProtocolConfig { - /// The mapping of [`Key`]s to [`StoredValue`]s we will use to update global storage in the - /// event of an emergency update. 
- pub(crate) fn get_update_mapping( - &self, - ) -> Result, bytesrepr::Error> { - let state_update = match &self.global_state_update { - Some(GlobalStateUpdate { entries, .. }) => entries, - None => return Ok(BTreeMap::default()), - }; - let mut update_mapping = BTreeMap::new(); - for (key, stored_value_bytes) in state_update { - let stored_value = bytesrepr::deserialize(stored_value_bytes.clone().into())?; - update_mapping.insert(*key, stored_value); - } - Ok(update_mapping) - } - - /// Returns a random `ProtocolConfig`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let protocol_version = ProtocolVersion::from_parts( - rng.gen_range(0..10), - rng.gen::() as u32, - rng.gen::() as u32, - ); - let activation_point = ActivationPoint::random(rng); - - ProtocolConfig { - version: protocol_version, - hard_reset: rng.gen(), - activation_point, - global_state_update: None, - } - } -} - -impl ToBytes for ProtocolConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.version.to_string().to_bytes()?); - buffer.extend(self.hard_reset.to_bytes()?); - buffer.extend(self.activation_point.to_bytes()?); - buffer.extend(self.global_state_update.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.version.to_string().serialized_length() - + self.hard_reset.serialized_length() - + self.activation_point.serialized_length() - + self.global_state_update.serialized_length() - } -} - -impl FromBytes for ProtocolConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (protocol_version_string, remainder) = String::from_bytes(bytes)?; - let version = ProtocolVersion::from_str(&protocol_version_string) - .map_err(|_| bytesrepr::Error::Formatting)?; - let (hard_reset, remainder) = bool::from_bytes(remainder)?; - let (activation_point, remainder) = ActivationPoint::from_bytes(remainder)?; - let (global_state_update, remainder) = Option::::from_bytes(remainder)?; - let protocol_config = ProtocolConfig { - version, - hard_reset, - activation_point, - global_state_update, - }; - Ok((protocol_config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use rand::SeedableRng; - - #[test] - fn activation_point_bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let activation_point = ActivationPoint::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&activation_point); - } - - #[test] - fn protocol_config_bytesrepr_roundtrip() { - let mut rng = TestRng::from_entropy(); - let config = ProtocolConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/refund_handling.rs b/casper_types_ver_2_0/src/chainspec/refund_handling.rs deleted file mode 100644 index 0da6bb60..00000000 --- a/casper_types_ver_2_0/src/chainspec/refund_handling.rs +++ /dev/null @@ -1,97 +0,0 @@ -/// Configuration options of refund handling that are executed as part of handle payment -/// finalization. -use num_rational::Ratio; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -const REFUND_HANDLING_REFUND_TAG: u8 = 0; -const REFUND_HANDLING_BURN_TAG: u8 = 1; - -/// Defines how refunds are calculated. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum RefundHandling { - /// Refund of excess payment amount goes to either a pre-defined purse, or back to the sender - /// and the rest of the payment amount goes to the block proposer. - Refund { - /// Computes how much refund goes back to the user after deducting gas spent from the paid - /// amount. - /// - /// user_part = (payment_amount - gas_spent_amount) * refund_ratio - /// validator_part = payment_amount - user_part - /// - /// Any dust amount that was a result of multiplying by refund_ratio goes back to user. - refund_ratio: Ratio, - }, - /// Burns the refund amount. - Burn { - /// Computes how much of the refund amount is burned after deducting gas spent from the - /// paid amount. - refund_ratio: Ratio, - }, -} - -impl ToBytes for RefundHandling { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - - match self { - RefundHandling::Refund { refund_ratio } => { - buffer.push(REFUND_HANDLING_REFUND_TAG); - buffer.extend(refund_ratio.to_bytes()?); - } - RefundHandling::Burn { refund_ratio } => { - buffer.push(REFUND_HANDLING_BURN_TAG); - buffer.extend(refund_ratio.to_bytes()?); - } - } - - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - 1 + match self { - RefundHandling::Refund { refund_ratio } => refund_ratio.serialized_length(), - RefundHandling::Burn { refund_ratio } => refund_ratio.serialized_length(), - } - } -} - -impl FromBytes for RefundHandling { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, rem) = u8::from_bytes(bytes)?; - match tag { - REFUND_HANDLING_REFUND_TAG => { - let (refund_ratio, rem) = FromBytes::from_bytes(rem)?; - Ok((RefundHandling::Refund { refund_ratio }, rem)) - } - REFUND_HANDLING_BURN_TAG => { - let (refund_ratio, rem) = FromBytes::from_bytes(rem)?; - Ok((RefundHandling::Burn { refund_ratio }, rem)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip_for_refund() { - let refund_config = RefundHandling::Refund { - refund_ratio: Ratio::new(49, 313), - }; - bytesrepr::test_serialization_roundtrip(&refund_config); - } - - #[test] - fn bytesrepr_roundtrip_for_burn() { - let refund_config = RefundHandling::Burn { - refund_ratio: Ratio::new(49, 313), - }; - bytesrepr::test_serialization_roundtrip(&refund_config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/transaction_config.rs b/casper_types_ver_2_0/src/chainspec/transaction_config.rs deleted file mode 100644 index ea905582..00000000 --- a/casper_types_ver_2_0/src/chainspec/transaction_config.rs +++ /dev/null @@ -1,211 +0,0 @@ -mod deploy_config; -mod transaction_v1_config; - -#[cfg(any(feature = "testing", test))] -use alloc::str::FromStr; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - TimeDiff, -}; - -pub use deploy_config::DeployConfig; -#[cfg(any(feature = "testing", test))] -pub use deploy_config::DEFAULT_MAX_PAYMENT_MOTES; -pub use transaction_v1_config::TransactionV1Config; - -/// The default minimum number of motes that can be transferred. 
-#[cfg(any(feature = "testing", test))] -pub const DEFAULT_MIN_TRANSFER_MOTES: u64 = 2_500_000_000; - -/// Configuration values associated with Transactions. -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -// Disallow unknown fields to ensure config files and command-line overrides contain valid keys. -#[serde(deny_unknown_fields)] -pub struct TransactionConfig { - /// Maximum time to live any transaction can specify. - pub max_ttl: TimeDiff, - /// Maximum size in bytes of a single transaction, when bytesrepr encoded. - pub max_transaction_size: u32, - /// Maximum number of transfer transactions allowed in a block. - pub block_max_transfer_count: u32, - /// Maximum number of staking transactions allowed in a block. - pub block_max_staking_count: u32, - /// Maximum number of installer/upgrader transactions allowed in a block. - pub block_max_install_upgrade_count: u32, - /// Maximum number of other transactions (non-transfer, non-staking, non-installer/upgrader) - /// allowed in a block. - pub block_max_standard_count: u32, - /// Maximum number of approvals (signatures) allowed in a block across all transactions. - pub block_max_approval_count: u32, - /// Maximum possible size in bytes of a block. - pub max_block_size: u32, - /// Maximum sum of payment across all transactions included in a block. - pub block_gas_limit: u64, - /// Minimum token amount for a native transfer deploy or transaction (a transfer deploy or - /// transaction received with an transfer amount less than this will be rejected upon receipt). - pub native_transfer_minimum_motes: u64, - /// Maximum value to which `transaction_acceptor.timestamp_leeway` can be set in the - /// config.toml file. - pub max_timestamp_leeway: TimeDiff, - /// Configuration values specific to Deploy transactions. - #[serde(rename = "deploy")] - pub deploy_config: DeployConfig, - /// Configuration values specific to V1 transactions. - #[serde(rename = "v1")] - pub transaction_v1_config: TransactionV1Config, -} - -#[cfg(any(feature = "testing", test))] -impl TransactionConfig { - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - let max_ttl = TimeDiff::from_seconds(rng.gen_range(60..3_600)); - let max_transaction_size = rng.gen_range(100_000..1_000_000); - let block_max_transfer_count = rng.gen(); - let block_max_staking_count = rng.gen(); - let block_max_install_upgrade_count = rng.gen(); - let block_max_standard_count = rng.gen(); - let block_max_approval_count = rng.gen(); - let max_block_size = rng.gen_range(1_000_000..1_000_000_000); - let block_gas_limit = rng.gen_range(100_000_000_000..1_000_000_000_000_000); - let native_transfer_minimum_motes = - rng.gen_range(DEFAULT_MIN_TRANSFER_MOTES..1_000_000_000_000_000); - let max_timestamp_leeway = TimeDiff::from_seconds(rng.gen_range(0..6)); - let deploy_config = DeployConfig::random(rng); - let transaction_v1_config = TransactionV1Config::random(rng); - - TransactionConfig { - max_ttl, - max_transaction_size, - block_max_transfer_count, - block_max_staking_count, - block_max_install_upgrade_count, - block_max_standard_count, - block_max_approval_count, - max_block_size, - block_gas_limit, - native_transfer_minimum_motes, - max_timestamp_leeway, - deploy_config, - transaction_v1_config, - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Default for TransactionConfig { - fn default() -> Self { - let eighteeen_hours = TimeDiff::from_seconds(18 * 60 * 60); - TransactionConfig { - max_ttl: eighteeen_hours, - max_transaction_size: 1_048_576, - block_max_transfer_count: 1000, - block_max_staking_count: 200, - block_max_install_upgrade_count: 2, - block_max_standard_count: 100, - block_max_approval_count: 2600, - max_block_size: 10_485_760, - block_gas_limit: 10_000_000_000_000, - native_transfer_minimum_motes: DEFAULT_MIN_TRANSFER_MOTES, - max_timestamp_leeway: TimeDiff::from_str("5sec").unwrap(), - deploy_config: DeployConfig::default(), - transaction_v1_config: TransactionV1Config::default(), - } - } -} - -impl ToBytes for TransactionConfig { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.max_ttl.write_bytes(writer)?; - self.max_transaction_size.write_bytes(writer)?; - self.block_max_transfer_count.write_bytes(writer)?; - self.block_max_staking_count.write_bytes(writer)?; - self.block_max_install_upgrade_count.write_bytes(writer)?; - self.block_max_standard_count.write_bytes(writer)?; - self.block_max_approval_count.write_bytes(writer)?; - self.max_block_size.write_bytes(writer)?; - self.block_gas_limit.write_bytes(writer)?; - self.native_transfer_minimum_motes.write_bytes(writer)?; - self.max_timestamp_leeway.write_bytes(writer)?; - self.deploy_config.write_bytes(writer)?; - self.transaction_v1_config.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.max_ttl.serialized_length() - + self.max_transaction_size.serialized_length() - + self.block_max_transfer_count.serialized_length() - + self.block_max_staking_count.serialized_length() - + self.block_max_install_upgrade_count.serialized_length() - + self.block_max_standard_count.serialized_length() - + self.block_max_approval_count.serialized_length() - + self.max_block_size.serialized_length() - + self.block_gas_limit.serialized_length() - + self.native_transfer_minimum_motes.serialized_length() - + self.max_timestamp_leeway.serialized_length() - + self.deploy_config.serialized_length() - + self.transaction_v1_config.serialized_length() - } -} - 
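The deleted `ToBytes`/`FromBytes` implementations above and below follow the crate's usual bytesrepr convention: `write_bytes` emits the fields in declaration order, `serialized_length` sums the per-field lengths, and `from_bytes` reads the fields back in the same order, returning the unread remainder. A minimal sketch of that pattern on a hypothetical two-field config (the `ExampleConfig` type and its field names are illustrative only, not part of this changeset; the `casper_types::bytesrepr` paths are assumed):

```rust
use casper_types::bytesrepr::{self, FromBytes, ToBytes};

#[derive(Debug, PartialEq, Eq)]
struct ExampleConfig {
    max_ttl_millis: u64,
    max_size: u32,
}

impl ToBytes for ExampleConfig {
    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {
        // Fields are written in declaration order; FromBytes must read them
        // back in exactly the same order.
        self.max_ttl_millis.write_bytes(writer)?;
        self.max_size.write_bytes(writer)
    }

    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
        let mut buffer = bytesrepr::allocate_buffer(self)?;
        self.write_bytes(&mut buffer)?;
        Ok(buffer)
    }

    fn serialized_length(&self) -> usize {
        self.max_ttl_millis.serialized_length() + self.max_size.serialized_length()
    }
}

impl FromBytes for ExampleConfig {
    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
        let (max_ttl_millis, remainder) = u64::from_bytes(bytes)?;
        let (max_size, remainder) = u32::from_bytes(remainder)?;
        Ok((ExampleConfig { max_ttl_millis, max_size }, remainder))
    }
}
```

Round-tripping such a type through `bytesrepr::test_serialization_roundtrip` is exactly what the `bytesrepr_roundtrip` tests in these deleted modules exercise.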
-impl FromBytes for TransactionConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (max_ttl, remainder) = TimeDiff::from_bytes(bytes)?; - let (max_transaction_size, remainder) = u32::from_bytes(remainder)?; - let (block_max_transfer_count, remainder) = u32::from_bytes(remainder)?; - let (block_max_staking_count, remainder) = u32::from_bytes(remainder)?; - let (block_max_install_upgrade_count, remainder) = u32::from_bytes(remainder)?; - let (block_max_standard_count, remainder) = u32::from_bytes(remainder)?; - let (block_max_approval_count, remainder) = u32::from_bytes(remainder)?; - let (max_block_size, remainder) = u32::from_bytes(remainder)?; - let (block_gas_limit, remainder) = u64::from_bytes(remainder)?; - let (native_transfer_minimum_motes, remainder) = u64::from_bytes(remainder)?; - let (max_timestamp_leeway, remainder) = TimeDiff::from_bytes(remainder)?; - let (deploy_config, remainder) = DeployConfig::from_bytes(remainder)?; - let (transaction_v1_config, remainder) = TransactionV1Config::from_bytes(remainder)?; - let config = TransactionConfig { - max_ttl, - max_transaction_size, - block_max_transfer_count, - block_max_staking_count, - block_max_install_upgrade_count, - block_max_standard_count, - block_max_approval_count, - max_block_size, - block_gas_limit, - native_transfer_minimum_motes, - max_timestamp_leeway, - deploy_config, - transaction_v1_config, - }; - Ok((config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::new(); - let config = TransactionConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/transaction_config/deploy_config.rs b/casper_types_ver_2_0/src/chainspec/transaction_config/deploy_config.rs deleted file mode 100644 index 06926266..00000000 --- a/casper_types_ver_2_0/src/chainspec/transaction_config/deploy_config.rs +++ /dev/null @@ -1,112 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Motes, -}; -#[cfg(any(feature = "testing", test))] -use crate::{testing::TestRng, U512}; - -/// The default maximum number of motes that payment code execution can cost. -#[cfg(any(feature = "testing", test))] -pub const DEFAULT_MAX_PAYMENT_MOTES: u64 = 2_500_000_000; - -/// Configuration values associated with deploys. -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -// Disallow unknown fields to ensure config files and command-line overrides contain valid keys. -#[serde(deny_unknown_fields)] -pub struct DeployConfig { - /// Maximum amount any deploy can pay. - pub max_payment_cost: Motes, - /// Maximum time to live any deploy can specify. - pub max_dependencies: u8, - /// Maximum length in bytes of payment args per deploy. - pub payment_args_max_length: u32, - /// Maximum length in bytes of session args per deploy. - pub session_args_max_length: u32, -} - -#[cfg(any(feature = "testing", test))] -impl DeployConfig { - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - let max_payment_cost = Motes::new(U512::from(rng.gen_range(1_000_000..1_000_000_000))); - let max_dependencies = rng.gen(); - let payment_args_max_length = rng.gen(); - let session_args_max_length = rng.gen(); - - DeployConfig { - max_payment_cost, - max_dependencies, - payment_args_max_length, - session_args_max_length, - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Default for DeployConfig { - fn default() -> Self { - DeployConfig { - max_payment_cost: Motes::new(U512::from(DEFAULT_MAX_PAYMENT_MOTES)), - max_dependencies: 10, - payment_args_max_length: 1024, - session_args_max_length: 1024, - } - } -} - -impl ToBytes for DeployConfig { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.max_payment_cost.write_bytes(writer)?; - self.max_dependencies.write_bytes(writer)?; - self.payment_args_max_length.write_bytes(writer)?; - self.session_args_max_length.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.max_payment_cost.value().serialized_length() - + self.max_dependencies.serialized_length() - + self.payment_args_max_length.serialized_length() - + self.session_args_max_length.serialized_length() - } -} - -impl FromBytes for DeployConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (max_payment_cost, remainder) = Motes::from_bytes(bytes)?; - let (max_dependencies, remainder) = u8::from_bytes(remainder)?; - let (payment_args_max_length, remainder) = u32::from_bytes(remainder)?; - let (session_args_max_length, remainder) = u32::from_bytes(remainder)?; - let config = DeployConfig { - max_payment_cost, - max_dependencies, - payment_args_max_length, - session_args_max_length, - }; - Ok((config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::new(); - let config = DeployConfig::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/transaction_config/transaction_v1_config.rs b/casper_types_ver_2_0/src/chainspec/transaction_config/transaction_v1_config.rs deleted file mode 100644 index 2e9220c3..00000000 --- a/casper_types_ver_2_0/src/chainspec/transaction_config/transaction_v1_config.rs +++ /dev/null @@ -1,74 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -/// Configuration values associated with V1 Transactions. -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -// Disallow unknown fields to ensure config files and command-line overrides contain valid keys. -#[serde(deny_unknown_fields)] -pub struct TransactionV1Config { - /// Maximum length in bytes of runtime args per Transaction. - pub max_args_length: u32, -} - -#[cfg(any(feature = "testing", test))] -impl TransactionV1Config { - /// Generates a random instance using a `TestRng`. 
- pub fn random(rng: &mut TestRng) -> Self { - let max_args_length = rng.gen(); - - TransactionV1Config { max_args_length } - } -} - -#[cfg(any(feature = "testing", test))] -impl Default for TransactionV1Config { - fn default() -> Self { - TransactionV1Config { - max_args_length: 1024, - } - } -} - -impl ToBytes for TransactionV1Config { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.max_args_length.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.max_args_length.serialized_length() - } -} - -impl FromBytes for TransactionV1Config { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (max_args_length, remainder) = u32::from_bytes(bytes)?; - let config = TransactionV1Config { max_args_length }; - Ok((config, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::new(); - let config = TransactionV1Config::random(&mut rng); - bytesrepr::test_serialization_roundtrip(&config); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config.rs b/casper_types_ver_2_0/src/chainspec/vm_config.rs deleted file mode 100644 index 34bb856e..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config.rs +++ /dev/null @@ -1,42 +0,0 @@ -mod auction_costs; -mod chainspec_registry; -mod handle_payment_costs; -mod host_function_costs; -mod message_limits; -mod mint_costs; -mod opcode_costs; -mod standard_payment_costs; -mod storage_costs; -mod system_config; -mod upgrade_config; -mod wasm_config; - -pub use auction_costs::{AuctionCosts, DEFAULT_ADD_BID_COST, DEFAULT_DELEGATE_COST}; -pub use chainspec_registry::ChainspecRegistry; -pub use handle_payment_costs::HandlePaymentCosts; -pub use host_function_costs::{ - Cost as HostFunctionCost, HostFunction, HostFunctionCosts, - DEFAULT_HOST_FUNCTION_NEW_DICTIONARY, DEFAULT_NEW_DICTIONARY_COST, -}; -pub use message_limits::MessageLimits; -pub use mint_costs::{MintCosts, DEFAULT_TRANSFER_COST}; -pub use opcode_costs::{BrTableCost, ControlFlowCosts, OpcodeCosts}; -#[cfg(any(feature = "testing", test))] -pub use opcode_costs::{ - DEFAULT_ADD_COST, DEFAULT_BIT_COST, DEFAULT_CONST_COST, DEFAULT_CONTROL_FLOW_BLOCK_OPCODE, - DEFAULT_CONTROL_FLOW_BR_IF_OPCODE, DEFAULT_CONTROL_FLOW_BR_OPCODE, - DEFAULT_CONTROL_FLOW_BR_TABLE_MULTIPLIER, DEFAULT_CONTROL_FLOW_BR_TABLE_OPCODE, - DEFAULT_CONTROL_FLOW_CALL_INDIRECT_OPCODE, DEFAULT_CONTROL_FLOW_CALL_OPCODE, - DEFAULT_CONTROL_FLOW_DROP_OPCODE, DEFAULT_CONTROL_FLOW_ELSE_OPCODE, - DEFAULT_CONTROL_FLOW_END_OPCODE, DEFAULT_CONTROL_FLOW_IF_OPCODE, - DEFAULT_CONTROL_FLOW_LOOP_OPCODE, DEFAULT_CONTROL_FLOW_RETURN_OPCODE, - DEFAULT_CONTROL_FLOW_SELECT_OPCODE, DEFAULT_CONVERSION_COST, DEFAULT_CURRENT_MEMORY_COST, - DEFAULT_DIV_COST, DEFAULT_GLOBAL_COST, DEFAULT_GROW_MEMORY_COST, - DEFAULT_INTEGER_COMPARISON_COST, DEFAULT_LOAD_COST, DEFAULT_LOCAL_COST, DEFAULT_MUL_COST, - DEFAULT_NOP_COST, DEFAULT_STORE_COST, DEFAULT_UNREACHABLE_COST, -}; -pub use standard_payment_costs::StandardPaymentCosts; -pub use storage_costs::StorageCosts; -pub use system_config::{SystemConfig, DEFAULT_WASMLESS_TRANSFER_COST}; -pub use upgrade_config::UpgradeConfig; -pub use wasm_config::{WasmConfig, DEFAULT_MAX_STACK_HEIGHT, DEFAULT_WASM_MAX_MEMORY}; diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/auction_costs.rs 
b/casper_types_ver_2_0/src/chainspec/vm_config/auction_costs.rs deleted file mode 100644 index 2a673515..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/auction_costs.rs +++ /dev/null @@ -1,269 +0,0 @@ -//! Costs of the auction system contract. -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Default cost of the `get_era_validators` auction entry point. -pub const DEFAULT_GET_ERA_VALIDATORS_COST: u32 = 10_000; -/// Default cost of the `read_seigniorage_recipients` auction entry point. -pub const DEFAULT_READ_SEIGNIORAGE_RECIPIENTS_COST: u32 = 10_000; -/// Default cost of the `add_bid` auction entry point. -pub const DEFAULT_ADD_BID_COST: u32 = 2_500_000_000; -/// Default cost of the `withdraw_bid` auction entry point. -pub const DEFAULT_WITHDRAW_BID_COST: u32 = 2_500_000_000; -/// Default cost of the `delegate` auction entry point. -pub const DEFAULT_DELEGATE_COST: u32 = 2_500_000_000; -/// Default cost of the `redelegate` auction entry point. -pub const DEFAULT_REDELEGATE_COST: u32 = 2_500_000_000; -/// Default cost of the `undelegate` auction entry point. -pub const DEFAULT_UNDELEGATE_COST: u32 = 2_500_000_000; -/// Default cost of the `run_auction` auction entry point. -pub const DEFAULT_RUN_AUCTION_COST: u32 = 10_000; -/// Default cost of the `slash` auction entry point. -pub const DEFAULT_SLASH_COST: u32 = 10_000; -/// Default cost of the `distribute` auction entry point. -pub const DEFAULT_DISTRIBUTE_COST: u32 = 10_000; -/// Default cost of the `withdraw_delegator_reward` auction entry point. -pub const DEFAULT_WITHDRAW_DELEGATOR_REWARD_COST: u32 = 10_000; -/// Default cost of the `withdraw_validator_reward` auction entry point. -pub const DEFAULT_WITHDRAW_VALIDATOR_REWARD_COST: u32 = 10_000; -/// Default cost of the `read_era_id` auction entry point. -pub const DEFAULT_READ_ERA_ID_COST: u32 = 10_000; -/// Default cost of the `activate_bid` auction entry point. -pub const DEFAULT_ACTIVATE_BID_COST: u32 = 10_000; - -/// Description of the costs of calling auction entrypoints. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct AuctionCosts { - /// Cost of calling the `get_era_validators` entry point. - pub get_era_validators: u32, - /// Cost of calling the `read_seigniorage_recipients` entry point. - pub read_seigniorage_recipients: u32, - /// Cost of calling the `add_bid` entry point. - pub add_bid: u32, - /// Cost of calling the `withdraw_bid` entry point. - pub withdraw_bid: u32, - /// Cost of calling the `delegate` entry point. - pub delegate: u32, - /// Cost of calling the `undelegate` entry point. - pub undelegate: u32, - /// Cost of calling the `run_auction` entry point. - pub run_auction: u32, - /// Cost of calling the `slash` entry point. - pub slash: u32, - /// Cost of calling the `distribute` entry point. - pub distribute: u32, - /// Cost of calling the `withdraw_delegator_reward` entry point. - pub withdraw_delegator_reward: u32, - /// Cost of calling the `withdraw_validator_reward` entry point. - pub withdraw_validator_reward: u32, - /// Cost of calling the `read_era_id` entry point. - pub read_era_id: u32, - /// Cost of calling the `activate_bid` entry point. - pub activate_bid: u32, - /// Cost of calling the `redelegate` entry point. 
- pub redelegate: u32, -} - -impl Default for AuctionCosts { - fn default() -> Self { - Self { - get_era_validators: DEFAULT_GET_ERA_VALIDATORS_COST, - read_seigniorage_recipients: DEFAULT_READ_SEIGNIORAGE_RECIPIENTS_COST, - add_bid: DEFAULT_ADD_BID_COST, - withdraw_bid: DEFAULT_WITHDRAW_BID_COST, - delegate: DEFAULT_DELEGATE_COST, - undelegate: DEFAULT_UNDELEGATE_COST, - run_auction: DEFAULT_RUN_AUCTION_COST, - slash: DEFAULT_SLASH_COST, - distribute: DEFAULT_DISTRIBUTE_COST, - withdraw_delegator_reward: DEFAULT_WITHDRAW_DELEGATOR_REWARD_COST, - withdraw_validator_reward: DEFAULT_WITHDRAW_VALIDATOR_REWARD_COST, - read_era_id: DEFAULT_READ_ERA_ID_COST, - activate_bid: DEFAULT_ACTIVATE_BID_COST, - redelegate: DEFAULT_REDELEGATE_COST, - } - } -} - -impl ToBytes for AuctionCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - let Self { - get_era_validators, - read_seigniorage_recipients, - add_bid, - withdraw_bid, - delegate, - undelegate, - run_auction, - slash, - distribute, - withdraw_delegator_reward, - withdraw_validator_reward, - read_era_id, - activate_bid, - redelegate, - } = self; - - ret.append(&mut get_era_validators.to_bytes()?); - ret.append(&mut read_seigniorage_recipients.to_bytes()?); - ret.append(&mut add_bid.to_bytes()?); - ret.append(&mut withdraw_bid.to_bytes()?); - ret.append(&mut delegate.to_bytes()?); - ret.append(&mut undelegate.to_bytes()?); - ret.append(&mut run_auction.to_bytes()?); - ret.append(&mut slash.to_bytes()?); - ret.append(&mut distribute.to_bytes()?); - ret.append(&mut withdraw_delegator_reward.to_bytes()?); - ret.append(&mut withdraw_validator_reward.to_bytes()?); - ret.append(&mut read_era_id.to_bytes()?); - ret.append(&mut activate_bid.to_bytes()?); - ret.append(&mut redelegate.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - let Self { - get_era_validators, - read_seigniorage_recipients, - add_bid, - withdraw_bid, - delegate, - undelegate, - run_auction, - slash, - distribute, - withdraw_delegator_reward, - withdraw_validator_reward, - read_era_id, - activate_bid, - redelegate, - } = self; - - get_era_validators.serialized_length() - + read_seigniorage_recipients.serialized_length() - + add_bid.serialized_length() - + withdraw_bid.serialized_length() - + delegate.serialized_length() - + undelegate.serialized_length() - + run_auction.serialized_length() - + slash.serialized_length() - + distribute.serialized_length() - + withdraw_delegator_reward.serialized_length() - + withdraw_validator_reward.serialized_length() - + read_era_id.serialized_length() - + activate_bid.serialized_length() - + redelegate.serialized_length() - } -} - -impl FromBytes for AuctionCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (get_era_validators, rem) = FromBytes::from_bytes(bytes)?; - let (read_seigniorage_recipients, rem) = FromBytes::from_bytes(rem)?; - let (add_bid, rem) = FromBytes::from_bytes(rem)?; - let (withdraw_bid, rem) = FromBytes::from_bytes(rem)?; - let (delegate, rem) = FromBytes::from_bytes(rem)?; - let (undelegate, rem) = FromBytes::from_bytes(rem)?; - let (run_auction, rem) = FromBytes::from_bytes(rem)?; - let (slash, rem) = FromBytes::from_bytes(rem)?; - let (distribute, rem) = FromBytes::from_bytes(rem)?; - let (withdraw_delegator_reward, rem) = FromBytes::from_bytes(rem)?; - let (withdraw_validator_reward, rem) = FromBytes::from_bytes(rem)?; - let (read_era_id, rem) = FromBytes::from_bytes(rem)?; - let (activate_bid, 
rem) = FromBytes::from_bytes(rem)?; - let (redelegate, rem) = FromBytes::from_bytes(rem)?; - Ok(( - Self { - get_era_validators, - read_seigniorage_recipients, - add_bid, - withdraw_bid, - delegate, - undelegate, - run_auction, - slash, - distribute, - withdraw_delegator_reward, - withdraw_validator_reward, - read_era_id, - activate_bid, - redelegate, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> AuctionCosts { - AuctionCosts { - get_era_validators: rng.gen(), - read_seigniorage_recipients: rng.gen(), - add_bid: rng.gen(), - withdraw_bid: rng.gen(), - delegate: rng.gen(), - undelegate: rng.gen(), - run_auction: rng.gen(), - slash: rng.gen(), - distribute: rng.gen(), - withdraw_delegator_reward: rng.gen(), - withdraw_validator_reward: rng.gen(), - read_era_id: rng.gen(), - activate_bid: rng.gen(), - redelegate: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::AuctionCosts; - - prop_compose! { - pub fn auction_costs_arb()( - get_era_validators in num::u32::ANY, - read_seigniorage_recipients in num::u32::ANY, - add_bid in num::u32::ANY, - withdraw_bid in num::u32::ANY, - delegate in num::u32::ANY, - undelegate in num::u32::ANY, - run_auction in num::u32::ANY, - slash in num::u32::ANY, - distribute in num::u32::ANY, - withdraw_delegator_reward in num::u32::ANY, - withdraw_validator_reward in num::u32::ANY, - read_era_id in num::u32::ANY, - activate_bid in num::u32::ANY, - redelegate in num::u32::ANY, - ) -> AuctionCosts { - AuctionCosts { - get_era_validators, - read_seigniorage_recipients, - add_bid, - withdraw_bid, - delegate, - undelegate, - run_auction, - slash, - distribute, - withdraw_delegator_reward, - withdraw_validator_reward, - read_era_id, - activate_bid, - redelegate, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/chainspec_registry.rs b/casper_types_ver_2_0/src/chainspec/vm_config/chainspec_registry.rs deleted file mode 100644 index 38e13b15..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/chainspec_registry.rs +++ /dev/null @@ -1,157 +0,0 @@ -//! The registry of chainspec hash digests. - -use std::{collections::BTreeMap, convert::TryFrom}; - -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, Digest, -}; - -type BytesreprChainspecRegistry = BTreeMap; - -/// The chainspec registry. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Debug)] -pub struct ChainspecRegistry { - chainspec_raw_hash: Digest, - genesis_accounts_raw_hash: Option, - global_state_raw_hash: Option, -} - -impl ChainspecRegistry { - const CHAINSPEC_RAW_MAP_KEY: &'static str = "chainspec_raw"; - const GENESIS_ACCOUNTS_RAW_MAP_KEY: &'static str = "genesis_accounts_raw"; - const GLOBAL_STATE_RAW_MAP_KEY: &'static str = "global_state_raw"; - - /// Returns a `ChainspecRegistry` constructed at genesis. - pub fn new_with_genesis( - chainspec_file_bytes: &[u8], - genesis_accounts_file_bytes: &[u8], - ) -> Self { - ChainspecRegistry { - chainspec_raw_hash: Digest::hash(chainspec_file_bytes), - genesis_accounts_raw_hash: Some(Digest::hash(genesis_accounts_file_bytes)), - global_state_raw_hash: None, - } - } - - /// Returns a `ChainspecRegistry` constructed at node upgrade. 
- pub fn new_with_optional_global_state( - chainspec_file_bytes: &[u8], - global_state_file_bytes: Option<&[u8]>, - ) -> Self { - ChainspecRegistry { - chainspec_raw_hash: Digest::hash(chainspec_file_bytes), - genesis_accounts_raw_hash: None, - global_state_raw_hash: global_state_file_bytes.map(Digest::hash), - } - } - - /// Returns the hash of the raw bytes of the chainspec.toml file. - pub fn chainspec_raw_hash(&self) -> &Digest { - &self.chainspec_raw_hash - } - - /// Returns the hash of the raw bytes of the genesis accounts.toml file if it exists. - pub fn genesis_accounts_raw_hash(&self) -> Option<&Digest> { - self.genesis_accounts_raw_hash.as_ref() - } - - /// Returns the hash of the raw bytes of the global_state.toml file if it exists. - pub fn global_state_raw_hash(&self) -> Option<&Digest> { - self.global_state_raw_hash.as_ref() - } - - fn as_map(&self) -> BytesreprChainspecRegistry { - let mut map = BTreeMap::new(); - map.insert( - Self::CHAINSPEC_RAW_MAP_KEY.to_string(), - self.chainspec_raw_hash, - ); - if let Some(genesis_accounts_raw_hash) = self.genesis_accounts_raw_hash { - map.insert( - Self::GENESIS_ACCOUNTS_RAW_MAP_KEY.to_string(), - genesis_accounts_raw_hash, - ); - } - if let Some(global_state_raw_hash) = self.global_state_raw_hash { - map.insert( - Self::GLOBAL_STATE_RAW_MAP_KEY.to_string(), - global_state_raw_hash, - ); - } - map - } -} - -impl TryFrom for ChainspecRegistry { - type Error = bytesrepr::Error; - - fn try_from(map: BytesreprChainspecRegistry) -> Result { - let chainspec_raw_hash = *map - .get(Self::CHAINSPEC_RAW_MAP_KEY) - .ok_or(bytesrepr::Error::Formatting)?; - let genesis_accounts_raw_hash = map.get(Self::GENESIS_ACCOUNTS_RAW_MAP_KEY).copied(); - let global_state_raw_hash = map.get(Self::GLOBAL_STATE_RAW_MAP_KEY).copied(); - Ok(ChainspecRegistry { - chainspec_raw_hash, - genesis_accounts_raw_hash, - global_state_raw_hash, - }) - } -} - -impl ToBytes for ChainspecRegistry { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.as_map().to_bytes() - } - - fn serialized_length(&self) -> usize { - self.as_map().serialized_length() - } -} - -impl FromBytes for ChainspecRegistry { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (map, remainder) = BytesreprChainspecRegistry::from_bytes(bytes)?; - let chainspec_registry = ChainspecRegistry::try_from(map)?; - Ok((chainspec_registry, remainder)) - } -} - -impl CLTyped for ChainspecRegistry { - fn cl_type() -> CLType { - BytesreprChainspecRegistry::cl_type() - } -} - -#[cfg(test)] -mod tests { - use rand::Rng; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = rand::thread_rng(); - - let chainspec_file_bytes: [u8; 10] = rng.gen(); - - let genesis_account_file_bytes: [u8; 10] = rng.gen(); - let chainspec_registry = - ChainspecRegistry::new_with_genesis(&chainspec_file_bytes, &genesis_account_file_bytes); - bytesrepr::test_serialization_roundtrip(&chainspec_registry); - - let global_state_file_bytes: [u8; 10] = rng.gen(); - let chainspec_registry = ChainspecRegistry::new_with_optional_global_state( - &chainspec_file_bytes, - Some(&global_state_file_bytes), - ); - bytesrepr::test_serialization_roundtrip(&chainspec_registry); - - let chainspec_registry = - ChainspecRegistry::new_with_optional_global_state(&chainspec_file_bytes, None); - bytesrepr::test_serialization_roundtrip(&chainspec_registry); - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/handle_payment_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/handle_payment_costs.rs 
deleted file mode 100644 index 49f53708..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/handle_payment_costs.rs +++ /dev/null @@ -1,116 +0,0 @@ -//! Costs of the `handle_payment` system contract. -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Default cost of the `get_payment_purse` `handle_payment` entry point. -pub const DEFAULT_GET_PAYMENT_PURSE_COST: u32 = 10_000; -/// Default cost of the `set_refund_purse` `handle_payment` entry point. -pub const DEFAULT_SET_REFUND_PURSE_COST: u32 = 10_000; -/// Default cost of the `get_refund_purse` `handle_payment` entry point. -pub const DEFAULT_GET_REFUND_PURSE_COST: u32 = 10_000; -/// Default cost of the `finalize_payment` `handle_payment` entry point. -pub const DEFAULT_FINALIZE_PAYMENT_COST: u32 = 10_000; - -/// Description of the costs of calling `handle_payment` entrypoints. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct HandlePaymentCosts { - /// Cost of calling the `get_payment_purse` entry point. - pub get_payment_purse: u32, - /// Cost of calling the `set_refund_purse` entry point. - pub set_refund_purse: u32, - /// Cost of calling the `get_refund_purse` entry point. - pub get_refund_purse: u32, - /// Cost of calling the `finalize_payment` entry point. - pub finalize_payment: u32, -} - -impl Default for HandlePaymentCosts { - fn default() -> Self { - Self { - get_payment_purse: DEFAULT_GET_PAYMENT_PURSE_COST, - set_refund_purse: DEFAULT_SET_REFUND_PURSE_COST, - get_refund_purse: DEFAULT_GET_REFUND_PURSE_COST, - finalize_payment: DEFAULT_FINALIZE_PAYMENT_COST, - } - } -} - -impl ToBytes for HandlePaymentCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut self.get_payment_purse.to_bytes()?); - ret.append(&mut self.set_refund_purse.to_bytes()?); - ret.append(&mut self.get_refund_purse.to_bytes()?); - ret.append(&mut self.finalize_payment.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.get_payment_purse.serialized_length() - + self.set_refund_purse.serialized_length() - + self.get_refund_purse.serialized_length() - + self.finalize_payment.serialized_length() - } -} - -impl FromBytes for HandlePaymentCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (get_payment_purse, rem) = FromBytes::from_bytes(bytes)?; - let (set_refund_purse, rem) = FromBytes::from_bytes(rem)?; - let (get_refund_purse, rem) = FromBytes::from_bytes(rem)?; - let (finalize_payment, rem) = FromBytes::from_bytes(rem)?; - - Ok(( - Self { - get_payment_purse, - set_refund_purse, - get_refund_purse, - finalize_payment, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> HandlePaymentCosts { - HandlePaymentCosts { - get_payment_purse: rng.gen(), - set_refund_purse: rng.gen(), - get_refund_purse: rng.gen(), - finalize_payment: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::HandlePaymentCosts; - - prop_compose! 
{ - pub fn handle_payment_costs_arb()( - get_payment_purse in num::u32::ANY, - set_refund_purse in num::u32::ANY, - get_refund_purse in num::u32::ANY, - finalize_payment in num::u32::ANY, - ) -> HandlePaymentCosts { - HandlePaymentCosts { - get_payment_purse, - set_refund_purse, - get_refund_purse, - finalize_payment, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/host_function_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/host_function_costs.rs deleted file mode 100644 index c536fa76..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/host_function_costs.rs +++ /dev/null @@ -1,1080 +0,0 @@ -//! Support for host function gas cost tables. -use core::ops::Add; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use derive_more::Add; -use num_traits::Zero; -use rand::{distributions::Standard, prelude::Distribution, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}, - Gas, -}; - -/// Representation of argument's cost. -pub type Cost = u32; - -const COST_SERIALIZED_LENGTH: usize = U32_SERIALIZED_LENGTH; - -/// An identifier that represents an unused argument. -const NOT_USED: Cost = 0; - -/// An arbitrary default fixed cost for host functions that were not researched yet. -const DEFAULT_FIXED_COST: Cost = 200; - -const DEFAULT_ADD_COST: u32 = 5_800; -const DEFAULT_ADD_ASSOCIATED_KEY_COST: u32 = 9_000; - -const DEFAULT_CALL_CONTRACT_COST: u32 = 4_500; -const DEFAULT_CALL_CONTRACT_ARGS_SIZE_WEIGHT: u32 = 420; - -const DEFAULT_CREATE_PURSE_COST: u32 = 2_500_000_000; -const DEFAULT_GET_BALANCE_COST: u32 = 3_800; -const DEFAULT_GET_BLOCKTIME_COST: u32 = 330; -const DEFAULT_GET_CALLER_COST: u32 = 380; -const DEFAULT_GET_KEY_COST: u32 = 2_000; -const DEFAULT_GET_KEY_NAME_SIZE_WEIGHT: u32 = 440; -const DEFAULT_GET_MAIN_PURSE_COST: u32 = 1_300; -const DEFAULT_GET_PHASE_COST: u32 = 710; -const DEFAULT_GET_SYSTEM_CONTRACT_COST: u32 = 1_100; -const DEFAULT_HAS_KEY_COST: u32 = 1_500; -const DEFAULT_HAS_KEY_NAME_SIZE_WEIGHT: u32 = 840; -const DEFAULT_IS_VALID_UREF_COST: u32 = 760; -const DEFAULT_LOAD_NAMED_KEYS_COST: u32 = 42_000; -const DEFAULT_NEW_UREF_COST: u32 = 17_000; -const DEFAULT_NEW_UREF_VALUE_SIZE_WEIGHT: u32 = 590; - -const DEFAULT_PRINT_COST: u32 = 20_000; -const DEFAULT_PRINT_TEXT_SIZE_WEIGHT: u32 = 4_600; - -const DEFAULT_PUT_KEY_COST: u32 = 38_000; -const DEFAULT_PUT_KEY_NAME_SIZE_WEIGHT: u32 = 1_100; - -const DEFAULT_READ_HOST_BUFFER_COST: u32 = 3_500; -const DEFAULT_READ_HOST_BUFFER_DEST_SIZE_WEIGHT: u32 = 310; - -const DEFAULT_READ_VALUE_COST: u32 = 6_000; -const DEFAULT_DICTIONARY_GET_COST: u32 = 5_500; -const DEFAULT_DICTIONARY_GET_KEY_SIZE_WEIGHT: u32 = 590; - -const DEFAULT_REMOVE_ASSOCIATED_KEY_COST: u32 = 4_200; - -const DEFAULT_REMOVE_KEY_COST: u32 = 61_000; -const DEFAULT_REMOVE_KEY_NAME_SIZE_WEIGHT: u32 = 3_200; - -const DEFAULT_RET_COST: u32 = 23_000; -const DEFAULT_RET_VALUE_SIZE_WEIGHT: u32 = 420_000; - -const DEFAULT_REVERT_COST: u32 = 500; -const DEFAULT_SET_ACTION_THRESHOLD_COST: u32 = 74_000; -const DEFAULT_TRANSFER_FROM_PURSE_TO_ACCOUNT_COST: u32 = 2_500_000_000; -const DEFAULT_TRANSFER_FROM_PURSE_TO_PURSE_COST: u32 = 82_000; -const DEFAULT_TRANSFER_TO_ACCOUNT_COST: u32 = 2_500_000_000; -const DEFAULT_UPDATE_ASSOCIATED_KEY_COST: u32 = 4_200; - -const DEFAULT_WRITE_COST: u32 = 14_000; -const DEFAULT_WRITE_VALUE_SIZE_WEIGHT: u32 = 980; - -const DEFAULT_DICTIONARY_PUT_COST: u32 = 9_500; -const DEFAULT_DICTIONARY_PUT_KEY_BYTES_SIZE_WEIGHT: u32 = 1_800; 
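These default constants combine according to the weighted formula documented on `HostFunction` just below: a call is charged its fixed `cost` plus each argument's weight multiplied by the byte size of the data supplied for that argument. A rough worked example using the `write` defaults above, considering only the value-size weight with an arbitrary 256-byte value (the other argument weights are ignored here for brevity):

```rust
// Rough gas estimate for a call to the `write` host function under the
// defaults above, counting only the value-size term of the weighted formula:
//   total = fixed cost + weight * byte length of the argument.
fn main() {
    let fixed_cost: u64 = 14_000;     // DEFAULT_WRITE_COST
    let value_size_weight: u64 = 980; // DEFAULT_WRITE_VALUE_SIZE_WEIGHT
    let value_len_bytes: u64 = 256;   // example payload size, chosen arbitrarily

    let total = fixed_cost + value_size_weight * value_len_bytes;
    assert_eq!(total, 264_880);
    println!("approximate charge for a 256-byte write: {total} gas");
}
```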
-const DEFAULT_DICTIONARY_PUT_VALUE_SIZE_WEIGHT: u32 = 520; - -/// Default cost for a new dictionary. -pub const DEFAULT_NEW_DICTIONARY_COST: u32 = DEFAULT_NEW_UREF_COST; - -/// Host function cost unit for a new dictionary. -pub const DEFAULT_HOST_FUNCTION_NEW_DICTIONARY: HostFunction<[Cost; 1]> = - HostFunction::new(DEFAULT_NEW_DICTIONARY_COST, [NOT_USED]); - -/// Default value that the cost of calling `casper_emit_message` increases by for every new message -/// emitted within an execution. -pub const DEFAULT_COST_INCREASE_PER_MESSAGE_EMITTED: u32 = 50; - -/// Representation of a host function cost. -/// -/// The total gas cost is equal to `cost` + sum of each argument weight multiplied by the byte size -/// of the data. -#[derive(Copy, Clone, PartialEq, Eq, Deserialize, Serialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct HostFunction { - /// How much the user is charged for calling the host function. - cost: Cost, - /// Weights of the function arguments. - arguments: T, -} - -impl Default for HostFunction -where - T: Default, -{ - fn default() -> Self { - HostFunction::new(DEFAULT_FIXED_COST, Default::default()) - } -} - -impl HostFunction { - /// Creates a new instance of `HostFunction` with a fixed call cost and argument weights. - pub const fn new(cost: Cost, arguments: T) -> Self { - Self { cost, arguments } - } - - /// Returns the base gas fee for calling the host function. - pub fn cost(&self) -> Cost { - self.cost - } -} - -impl HostFunction -where - T: Default, -{ - /// Creates a new fixed host function cost with argument weights of zero. - pub fn fixed(cost: Cost) -> Self { - Self { - cost, - ..Default::default() - } - } -} - -impl HostFunction -where - T: AsRef<[Cost]>, -{ - /// Returns a slice containing the argument weights. 
- pub fn arguments(&self) -> &[Cost] { - self.arguments.as_ref() - } - - /// Calculate gas cost for a host function - pub fn calculate_gas_cost(&self, weights: T) -> Gas { - let mut gas = Gas::new(self.cost.into()); - for (argument, weight) in self.arguments.as_ref().iter().zip(weights.as_ref()) { - let lhs = Gas::new((*argument).into()); - let rhs = Gas::new((*weight).into()); - gas += lhs * rhs; - } - gas - } -} - -impl Add for HostFunction<[Cost; COUNT]> { - type Output = HostFunction<[Cost; COUNT]>; - - fn add(self, rhs: Self) -> Self::Output { - let mut result = HostFunction::new(self.cost + rhs.cost, [0; COUNT]); - for i in 0..COUNT { - result.arguments[i] = self.arguments[i] + rhs.arguments[i]; - } - result - } -} - -impl Zero for HostFunction<[Cost; COUNT]> { - fn zero() -> Self { - HostFunction::new(0, [0; COUNT]) - } - - fn is_zero(&self) -> bool { - !self.arguments.iter().any(|cost| *cost != 0) && self.cost.is_zero() - } -} - -impl Distribution> for Standard -where - Standard: Distribution, - T: AsRef<[Cost]>, -{ - fn sample(&self, rng: &mut R) -> HostFunction { - let cost = rng.gen::(); - let arguments = rng.gen(); - HostFunction::new(cost, arguments) - } -} - -impl ToBytes for HostFunction -where - T: AsRef<[Cost]>, -{ - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.cost.to_bytes()?); - for value in self.arguments.as_ref().iter() { - ret.append(&mut value.to_bytes()?); - } - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.cost.serialized_length() + (COST_SERIALIZED_LENGTH * self.arguments.as_ref().len()) - } -} - -impl FromBytes for HostFunction -where - T: Default + AsMut<[Cost]>, -{ - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (cost, mut bytes) = FromBytes::from_bytes(bytes)?; - let mut arguments = T::default(); - let arguments_mut = arguments.as_mut(); - for ith_argument in arguments_mut { - let (cost, rem) = FromBytes::from_bytes(bytes)?; - *ith_argument = cost; - bytes = rem; - } - Ok((Self { cost, arguments }, bytes)) - } -} - -/// Definition of a host function cost table. -#[derive(Add, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct HostFunctionCosts { - /// Cost increase for successive calls to `casper_emit_message` within an execution. - pub cost_increase_per_message: u32, - /// Cost of calling the `read_value` host function. - pub read_value: HostFunction<[Cost; 3]>, - /// Cost of calling the `dictionary_get` host function. - #[serde(alias = "read_value_local")] - pub dictionary_get: HostFunction<[Cost; 3]>, - /// Cost of calling the `write` host function. - pub write: HostFunction<[Cost; 4]>, - /// Cost of calling the `dictionary_put` host function. - #[serde(alias = "write_local")] - pub dictionary_put: HostFunction<[Cost; 4]>, - /// Cost of calling the `add` host function. - pub add: HostFunction<[Cost; 4]>, - /// Cost of calling the `new_uref` host function. - pub new_uref: HostFunction<[Cost; 3]>, - /// Cost of calling the `load_named_keys` host function. - pub load_named_keys: HostFunction<[Cost; 2]>, - /// Cost of calling the `ret` host function. - pub ret: HostFunction<[Cost; 2]>, - /// Cost of calling the `get_key` host function. - pub get_key: HostFunction<[Cost; 5]>, - /// Cost of calling the `has_key` host function. - pub has_key: HostFunction<[Cost; 2]>, - /// Cost of calling the `put_key` host function. 
- pub put_key: HostFunction<[Cost; 4]>, - /// Cost of calling the `remove_key` host function. - pub remove_key: HostFunction<[Cost; 2]>, - /// Cost of calling the `revert` host function. - pub revert: HostFunction<[Cost; 1]>, - /// Cost of calling the `is_valid_uref` host function. - pub is_valid_uref: HostFunction<[Cost; 2]>, - /// Cost of calling the `add_associated_key` host function. - pub add_associated_key: HostFunction<[Cost; 3]>, - /// Cost of calling the `remove_associated_key` host function. - pub remove_associated_key: HostFunction<[Cost; 2]>, - /// Cost of calling the `update_associated_key` host function. - pub update_associated_key: HostFunction<[Cost; 3]>, - /// Cost of calling the `set_action_threshold` host function. - pub set_action_threshold: HostFunction<[Cost; 2]>, - /// Cost of calling the `get_caller` host function. - pub get_caller: HostFunction<[Cost; 1]>, - /// Cost of calling the `get_blocktime` host function. - pub get_blocktime: HostFunction<[Cost; 1]>, - /// Cost of calling the `create_purse` host function. - pub create_purse: HostFunction<[Cost; 2]>, - /// Cost of calling the `transfer_to_account` host function. - pub transfer_to_account: HostFunction<[Cost; 7]>, - /// Cost of calling the `transfer_from_purse_to_account` host function. - pub transfer_from_purse_to_account: HostFunction<[Cost; 9]>, - /// Cost of calling the `transfer_from_purse_to_purse` host function. - pub transfer_from_purse_to_purse: HostFunction<[Cost; 8]>, - /// Cost of calling the `get_balance` host function. - pub get_balance: HostFunction<[Cost; 3]>, - /// Cost of calling the `get_phase` host function. - pub get_phase: HostFunction<[Cost; 1]>, - /// Cost of calling the `get_system_contract` host function. - pub get_system_contract: HostFunction<[Cost; 3]>, - /// Cost of calling the `get_main_purse` host function. - pub get_main_purse: HostFunction<[Cost; 1]>, - /// Cost of calling the `read_host_buffer` host function. - pub read_host_buffer: HostFunction<[Cost; 3]>, - /// Cost of calling the `create_contract_package_at_hash` host function. - pub create_contract_package_at_hash: HostFunction<[Cost; 2]>, - /// Cost of calling the `create_contract_user_group` host function. - pub create_contract_user_group: HostFunction<[Cost; 8]>, - /// Cost of calling the `add_contract_version` host function. - pub add_contract_version: HostFunction<[Cost; 9]>, - /// Cost of calling the `disable_contract_version` host function. - pub disable_contract_version: HostFunction<[Cost; 4]>, - /// Cost of calling the `call_contract` host function. - pub call_contract: HostFunction<[Cost; 7]>, - /// Cost of calling the `call_versioned_contract` host function. - pub call_versioned_contract: HostFunction<[Cost; 9]>, - /// Cost of calling the `get_named_arg_size` host function. - pub get_named_arg_size: HostFunction<[Cost; 3]>, - /// Cost of calling the `get_named_arg` host function. - pub get_named_arg: HostFunction<[Cost; 4]>, - /// Cost of calling the `remove_contract_user_group` host function. - pub remove_contract_user_group: HostFunction<[Cost; 4]>, - /// Cost of calling the `provision_contract_user_group_uref` host function. - pub provision_contract_user_group_uref: HostFunction<[Cost; 5]>, - /// Cost of calling the `remove_contract_user_group_urefs` host function. - pub remove_contract_user_group_urefs: HostFunction<[Cost; 6]>, - /// Cost of calling the `print` host function. - pub print: HostFunction<[Cost; 2]>, - /// Cost of calling the `blake2b` host function. 
- pub blake2b: HostFunction<[Cost; 4]>, - /// Cost of calling the `next address` host function. - pub random_bytes: HostFunction<[Cost; 2]>, - /// Cost of calling the `enable_contract_version` host function. - pub enable_contract_version: HostFunction<[Cost; 4]>, - /// Cost of calling the `add_session_version` host function. - pub add_session_version: HostFunction<[Cost; 2]>, - /// Cost of calling the `casper_manage_message_topic` host function. - pub manage_message_topic: HostFunction<[Cost; 4]>, - /// Cost of calling the `casper_emit_message` host function. - pub emit_message: HostFunction<[Cost; 4]>, -} - -impl Zero for HostFunctionCosts { - fn zero() -> Self { - Self { - read_value: HostFunction::zero(), - dictionary_get: HostFunction::zero(), - write: HostFunction::zero(), - dictionary_put: HostFunction::zero(), - add: HostFunction::zero(), - new_uref: HostFunction::zero(), - load_named_keys: HostFunction::zero(), - ret: HostFunction::zero(), - get_key: HostFunction::zero(), - has_key: HostFunction::zero(), - put_key: HostFunction::zero(), - remove_key: HostFunction::zero(), - revert: HostFunction::zero(), - is_valid_uref: HostFunction::zero(), - add_associated_key: HostFunction::zero(), - remove_associated_key: HostFunction::zero(), - update_associated_key: HostFunction::zero(), - set_action_threshold: HostFunction::zero(), - get_caller: HostFunction::zero(), - get_blocktime: HostFunction::zero(), - create_purse: HostFunction::zero(), - transfer_to_account: HostFunction::zero(), - transfer_from_purse_to_account: HostFunction::zero(), - transfer_from_purse_to_purse: HostFunction::zero(), - get_balance: HostFunction::zero(), - get_phase: HostFunction::zero(), - get_system_contract: HostFunction::zero(), - get_main_purse: HostFunction::zero(), - read_host_buffer: HostFunction::zero(), - create_contract_package_at_hash: HostFunction::zero(), - create_contract_user_group: HostFunction::zero(), - add_contract_version: HostFunction::zero(), - disable_contract_version: HostFunction::zero(), - call_contract: HostFunction::zero(), - call_versioned_contract: HostFunction::zero(), - get_named_arg_size: HostFunction::zero(), - get_named_arg: HostFunction::zero(), - remove_contract_user_group: HostFunction::zero(), - provision_contract_user_group_uref: HostFunction::zero(), - remove_contract_user_group_urefs: HostFunction::zero(), - print: HostFunction::zero(), - blake2b: HostFunction::zero(), - random_bytes: HostFunction::zero(), - enable_contract_version: HostFunction::zero(), - add_session_version: HostFunction::zero(), - manage_message_topic: HostFunction::zero(), - emit_message: HostFunction::zero(), - cost_increase_per_message: Zero::zero(), - } - } - - fn is_zero(&self) -> bool { - let HostFunctionCosts { - cost_increase_per_message, - read_value, - dictionary_get, - write, - dictionary_put, - add, - new_uref, - load_named_keys, - ret, - get_key, - has_key, - put_key, - remove_key, - revert, - is_valid_uref, - add_associated_key, - remove_associated_key, - update_associated_key, - set_action_threshold, - get_caller, - get_blocktime, - create_purse, - transfer_to_account, - transfer_from_purse_to_account, - transfer_from_purse_to_purse, - get_balance, - get_phase, - get_system_contract, - get_main_purse, - read_host_buffer, - create_contract_package_at_hash, - create_contract_user_group, - add_contract_version, - disable_contract_version, - call_contract, - call_versioned_contract, - get_named_arg_size, - get_named_arg, - remove_contract_user_group, - provision_contract_user_group_uref, - 
remove_contract_user_group_urefs, - print, - blake2b, - random_bytes, - enable_contract_version, - add_session_version, - manage_message_topic, - emit_message, - } = self; - read_value.is_zero() - && dictionary_get.is_zero() - && write.is_zero() - && dictionary_put.is_zero() - && add.is_zero() - && new_uref.is_zero() - && load_named_keys.is_zero() - && ret.is_zero() - && get_key.is_zero() - && has_key.is_zero() - && put_key.is_zero() - && remove_key.is_zero() - && revert.is_zero() - && is_valid_uref.is_zero() - && add_associated_key.is_zero() - && remove_associated_key.is_zero() - && update_associated_key.is_zero() - && set_action_threshold.is_zero() - && get_caller.is_zero() - && get_blocktime.is_zero() - && create_purse.is_zero() - && transfer_to_account.is_zero() - && transfer_from_purse_to_account.is_zero() - && transfer_from_purse_to_purse.is_zero() - && get_balance.is_zero() - && get_phase.is_zero() - && get_system_contract.is_zero() - && get_main_purse.is_zero() - && read_host_buffer.is_zero() - && create_contract_package_at_hash.is_zero() - && create_contract_user_group.is_zero() - && add_contract_version.is_zero() - && disable_contract_version.is_zero() - && call_contract.is_zero() - && call_versioned_contract.is_zero() - && get_named_arg_size.is_zero() - && get_named_arg.is_zero() - && remove_contract_user_group.is_zero() - && provision_contract_user_group_uref.is_zero() - && remove_contract_user_group_urefs.is_zero() - && print.is_zero() - && blake2b.is_zero() - && random_bytes.is_zero() - && enable_contract_version.is_zero() - && add_session_version.is_zero() - && manage_message_topic.is_zero() - && emit_message.is_zero() - && cost_increase_per_message.is_zero() - } -} - -impl Default for HostFunctionCosts { - fn default() -> Self { - Self { - read_value: HostFunction::fixed(DEFAULT_READ_VALUE_COST), - dictionary_get: HostFunction::new( - DEFAULT_DICTIONARY_GET_COST, - [NOT_USED, DEFAULT_DICTIONARY_GET_KEY_SIZE_WEIGHT, NOT_USED], - ), - write: HostFunction::new( - DEFAULT_WRITE_COST, - [ - NOT_USED, - NOT_USED, - NOT_USED, - DEFAULT_WRITE_VALUE_SIZE_WEIGHT, - ], - ), - dictionary_put: HostFunction::new( - DEFAULT_DICTIONARY_PUT_COST, - [ - NOT_USED, - DEFAULT_DICTIONARY_PUT_KEY_BYTES_SIZE_WEIGHT, - NOT_USED, - DEFAULT_DICTIONARY_PUT_VALUE_SIZE_WEIGHT, - ], - ), - add: HostFunction::fixed(DEFAULT_ADD_COST), - new_uref: HostFunction::new( - DEFAULT_NEW_UREF_COST, - [NOT_USED, NOT_USED, DEFAULT_NEW_UREF_VALUE_SIZE_WEIGHT], - ), - load_named_keys: HostFunction::fixed(DEFAULT_LOAD_NAMED_KEYS_COST), - ret: HostFunction::new(DEFAULT_RET_COST, [NOT_USED, DEFAULT_RET_VALUE_SIZE_WEIGHT]), - get_key: HostFunction::new( - DEFAULT_GET_KEY_COST, - [ - NOT_USED, - DEFAULT_GET_KEY_NAME_SIZE_WEIGHT, - NOT_USED, - NOT_USED, - NOT_USED, - ], - ), - has_key: HostFunction::new( - DEFAULT_HAS_KEY_COST, - [NOT_USED, DEFAULT_HAS_KEY_NAME_SIZE_WEIGHT], - ), - put_key: HostFunction::new( - DEFAULT_PUT_KEY_COST, - [ - NOT_USED, - DEFAULT_PUT_KEY_NAME_SIZE_WEIGHT, - NOT_USED, - NOT_USED, - ], - ), - remove_key: HostFunction::new( - DEFAULT_REMOVE_KEY_COST, - [NOT_USED, DEFAULT_REMOVE_KEY_NAME_SIZE_WEIGHT], - ), - revert: HostFunction::fixed(DEFAULT_REVERT_COST), - is_valid_uref: HostFunction::fixed(DEFAULT_IS_VALID_UREF_COST), - add_associated_key: HostFunction::fixed(DEFAULT_ADD_ASSOCIATED_KEY_COST), - remove_associated_key: HostFunction::fixed(DEFAULT_REMOVE_ASSOCIATED_KEY_COST), - update_associated_key: HostFunction::fixed(DEFAULT_UPDATE_ASSOCIATED_KEY_COST), - set_action_threshold: 
HostFunction::fixed(DEFAULT_SET_ACTION_THRESHOLD_COST), - get_caller: HostFunction::fixed(DEFAULT_GET_CALLER_COST), - get_blocktime: HostFunction::fixed(DEFAULT_GET_BLOCKTIME_COST), - create_purse: HostFunction::fixed(DEFAULT_CREATE_PURSE_COST), - transfer_to_account: HostFunction::fixed(DEFAULT_TRANSFER_TO_ACCOUNT_COST), - transfer_from_purse_to_account: HostFunction::fixed( - DEFAULT_TRANSFER_FROM_PURSE_TO_ACCOUNT_COST, - ), - transfer_from_purse_to_purse: HostFunction::fixed( - DEFAULT_TRANSFER_FROM_PURSE_TO_PURSE_COST, - ), - get_balance: HostFunction::fixed(DEFAULT_GET_BALANCE_COST), - get_phase: HostFunction::fixed(DEFAULT_GET_PHASE_COST), - get_system_contract: HostFunction::fixed(DEFAULT_GET_SYSTEM_CONTRACT_COST), - get_main_purse: HostFunction::fixed(DEFAULT_GET_MAIN_PURSE_COST), - read_host_buffer: HostFunction::new( - DEFAULT_READ_HOST_BUFFER_COST, - [ - NOT_USED, - DEFAULT_READ_HOST_BUFFER_DEST_SIZE_WEIGHT, - NOT_USED, - ], - ), - create_contract_package_at_hash: HostFunction::default(), - create_contract_user_group: HostFunction::default(), - add_contract_version: HostFunction::default(), - disable_contract_version: HostFunction::default(), - call_contract: HostFunction::new( - DEFAULT_CALL_CONTRACT_COST, - [ - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - DEFAULT_CALL_CONTRACT_ARGS_SIZE_WEIGHT, - NOT_USED, - ], - ), - call_versioned_contract: HostFunction::new( - DEFAULT_CALL_CONTRACT_COST, - [ - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - NOT_USED, - DEFAULT_CALL_CONTRACT_ARGS_SIZE_WEIGHT, - NOT_USED, - ], - ), - get_named_arg_size: HostFunction::default(), - get_named_arg: HostFunction::default(), - remove_contract_user_group: HostFunction::default(), - provision_contract_user_group_uref: HostFunction::default(), - remove_contract_user_group_urefs: HostFunction::default(), - print: HostFunction::new( - DEFAULT_PRINT_COST, - [NOT_USED, DEFAULT_PRINT_TEXT_SIZE_WEIGHT], - ), - blake2b: HostFunction::default(), - random_bytes: HostFunction::default(), - enable_contract_version: HostFunction::default(), - add_session_version: HostFunction::default(), - manage_message_topic: HostFunction::default(), - emit_message: HostFunction::default(), - cost_increase_per_message: DEFAULT_COST_INCREASE_PER_MESSAGE_EMITTED, - } - } -} - -impl ToBytes for HostFunctionCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.read_value.to_bytes()?); - ret.append(&mut self.dictionary_get.to_bytes()?); - ret.append(&mut self.write.to_bytes()?); - ret.append(&mut self.dictionary_put.to_bytes()?); - ret.append(&mut self.add.to_bytes()?); - ret.append(&mut self.new_uref.to_bytes()?); - ret.append(&mut self.load_named_keys.to_bytes()?); - ret.append(&mut self.ret.to_bytes()?); - ret.append(&mut self.get_key.to_bytes()?); - ret.append(&mut self.has_key.to_bytes()?); - ret.append(&mut self.put_key.to_bytes()?); - ret.append(&mut self.remove_key.to_bytes()?); - ret.append(&mut self.revert.to_bytes()?); - ret.append(&mut self.is_valid_uref.to_bytes()?); - ret.append(&mut self.add_associated_key.to_bytes()?); - ret.append(&mut self.remove_associated_key.to_bytes()?); - ret.append(&mut self.update_associated_key.to_bytes()?); - ret.append(&mut self.set_action_threshold.to_bytes()?); - ret.append(&mut self.get_caller.to_bytes()?); - ret.append(&mut self.get_blocktime.to_bytes()?); - ret.append(&mut self.create_purse.to_bytes()?); - ret.append(&mut 
self.transfer_to_account.to_bytes()?); - ret.append(&mut self.transfer_from_purse_to_account.to_bytes()?); - ret.append(&mut self.transfer_from_purse_to_purse.to_bytes()?); - ret.append(&mut self.get_balance.to_bytes()?); - ret.append(&mut self.get_phase.to_bytes()?); - ret.append(&mut self.get_system_contract.to_bytes()?); - ret.append(&mut self.get_main_purse.to_bytes()?); - ret.append(&mut self.read_host_buffer.to_bytes()?); - ret.append(&mut self.create_contract_package_at_hash.to_bytes()?); - ret.append(&mut self.create_contract_user_group.to_bytes()?); - ret.append(&mut self.add_contract_version.to_bytes()?); - ret.append(&mut self.disable_contract_version.to_bytes()?); - ret.append(&mut self.call_contract.to_bytes()?); - ret.append(&mut self.call_versioned_contract.to_bytes()?); - ret.append(&mut self.get_named_arg_size.to_bytes()?); - ret.append(&mut self.get_named_arg.to_bytes()?); - ret.append(&mut self.remove_contract_user_group.to_bytes()?); - ret.append(&mut self.provision_contract_user_group_uref.to_bytes()?); - ret.append(&mut self.remove_contract_user_group_urefs.to_bytes()?); - ret.append(&mut self.print.to_bytes()?); - ret.append(&mut self.blake2b.to_bytes()?); - ret.append(&mut self.random_bytes.to_bytes()?); - ret.append(&mut self.enable_contract_version.to_bytes()?); - ret.append(&mut self.add_session_version.to_bytes()?); - ret.append(&mut self.manage_message_topic.to_bytes()?); - ret.append(&mut self.emit_message.to_bytes()?); - ret.append(&mut self.cost_increase_per_message.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.read_value.serialized_length() - + self.dictionary_get.serialized_length() - + self.write.serialized_length() - + self.dictionary_put.serialized_length() - + self.add.serialized_length() - + self.new_uref.serialized_length() - + self.load_named_keys.serialized_length() - + self.ret.serialized_length() - + self.get_key.serialized_length() - + self.has_key.serialized_length() - + self.put_key.serialized_length() - + self.remove_key.serialized_length() - + self.revert.serialized_length() - + self.is_valid_uref.serialized_length() - + self.add_associated_key.serialized_length() - + self.remove_associated_key.serialized_length() - + self.update_associated_key.serialized_length() - + self.set_action_threshold.serialized_length() - + self.get_caller.serialized_length() - + self.get_blocktime.serialized_length() - + self.create_purse.serialized_length() - + self.transfer_to_account.serialized_length() - + self.transfer_from_purse_to_account.serialized_length() - + self.transfer_from_purse_to_purse.serialized_length() - + self.get_balance.serialized_length() - + self.get_phase.serialized_length() - + self.get_system_contract.serialized_length() - + self.get_main_purse.serialized_length() - + self.read_host_buffer.serialized_length() - + self.create_contract_package_at_hash.serialized_length() - + self.create_contract_user_group.serialized_length() - + self.add_contract_version.serialized_length() - + self.disable_contract_version.serialized_length() - + self.call_contract.serialized_length() - + self.call_versioned_contract.serialized_length() - + self.get_named_arg_size.serialized_length() - + self.get_named_arg.serialized_length() - + self.remove_contract_user_group.serialized_length() - + self.provision_contract_user_group_uref.serialized_length() - + self.remove_contract_user_group_urefs.serialized_length() - + self.print.serialized_length() - + self.blake2b.serialized_length() - + self.random_bytes.serialized_length() - + 
self.enable_contract_version.serialized_length() - + self.add_session_version.serialized_length() - + self.manage_message_topic.serialized_length() - + self.emit_message.serialized_length() - + self.cost_increase_per_message.serialized_length() - } -} - -impl FromBytes for HostFunctionCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (read_value, rem) = FromBytes::from_bytes(bytes)?; - let (dictionary_get, rem) = FromBytes::from_bytes(rem)?; - let (write, rem) = FromBytes::from_bytes(rem)?; - let (dictionary_put, rem) = FromBytes::from_bytes(rem)?; - let (add, rem) = FromBytes::from_bytes(rem)?; - let (new_uref, rem) = FromBytes::from_bytes(rem)?; - let (load_named_keys, rem) = FromBytes::from_bytes(rem)?; - let (ret, rem) = FromBytes::from_bytes(rem)?; - let (get_key, rem) = FromBytes::from_bytes(rem)?; - let (has_key, rem) = FromBytes::from_bytes(rem)?; - let (put_key, rem) = FromBytes::from_bytes(rem)?; - let (remove_key, rem) = FromBytes::from_bytes(rem)?; - let (revert, rem) = FromBytes::from_bytes(rem)?; - let (is_valid_uref, rem) = FromBytes::from_bytes(rem)?; - let (add_associated_key, rem) = FromBytes::from_bytes(rem)?; - let (remove_associated_key, rem) = FromBytes::from_bytes(rem)?; - let (update_associated_key, rem) = FromBytes::from_bytes(rem)?; - let (set_action_threshold, rem) = FromBytes::from_bytes(rem)?; - let (get_caller, rem) = FromBytes::from_bytes(rem)?; - let (get_blocktime, rem) = FromBytes::from_bytes(rem)?; - let (create_purse, rem) = FromBytes::from_bytes(rem)?; - let (transfer_to_account, rem) = FromBytes::from_bytes(rem)?; - let (transfer_from_purse_to_account, rem) = FromBytes::from_bytes(rem)?; - let (transfer_from_purse_to_purse, rem) = FromBytes::from_bytes(rem)?; - let (get_balance, rem) = FromBytes::from_bytes(rem)?; - let (get_phase, rem) = FromBytes::from_bytes(rem)?; - let (get_system_contract, rem) = FromBytes::from_bytes(rem)?; - let (get_main_purse, rem) = FromBytes::from_bytes(rem)?; - let (read_host_buffer, rem) = FromBytes::from_bytes(rem)?; - let (create_contract_package_at_hash, rem) = FromBytes::from_bytes(rem)?; - let (create_contract_user_group, rem) = FromBytes::from_bytes(rem)?; - let (add_contract_version, rem) = FromBytes::from_bytes(rem)?; - let (disable_contract_version, rem) = FromBytes::from_bytes(rem)?; - let (call_contract, rem) = FromBytes::from_bytes(rem)?; - let (call_versioned_contract, rem) = FromBytes::from_bytes(rem)?; - let (get_named_arg_size, rem) = FromBytes::from_bytes(rem)?; - let (get_named_arg, rem) = FromBytes::from_bytes(rem)?; - let (remove_contract_user_group, rem) = FromBytes::from_bytes(rem)?; - let (provision_contract_user_group_uref, rem) = FromBytes::from_bytes(rem)?; - let (remove_contract_user_group_urefs, rem) = FromBytes::from_bytes(rem)?; - let (print, rem) = FromBytes::from_bytes(rem)?; - let (blake2b, rem) = FromBytes::from_bytes(rem)?; - let (random_bytes, rem) = FromBytes::from_bytes(rem)?; - let (enable_contract_version, rem) = FromBytes::from_bytes(rem)?; - let (add_session_version, rem) = FromBytes::from_bytes(rem)?; - let (manage_message_topic, rem) = FromBytes::from_bytes(rem)?; - let (emit_message, rem) = FromBytes::from_bytes(rem)?; - let (cost_increase_per_message, rem) = FromBytes::from_bytes(rem)?; - Ok(( - HostFunctionCosts { - read_value, - dictionary_get, - write, - dictionary_put, - add, - new_uref, - load_named_keys, - ret, - get_key, - has_key, - put_key, - remove_key, - revert, - is_valid_uref, - add_associated_key, - remove_associated_key, - 
update_associated_key, - set_action_threshold, - get_caller, - get_blocktime, - create_purse, - transfer_to_account, - transfer_from_purse_to_account, - transfer_from_purse_to_purse, - get_balance, - get_phase, - get_system_contract, - get_main_purse, - read_host_buffer, - create_contract_package_at_hash, - create_contract_user_group, - add_contract_version, - disable_contract_version, - call_contract, - call_versioned_contract, - get_named_arg_size, - get_named_arg, - remove_contract_user_group, - provision_contract_user_group_uref, - remove_contract_user_group_urefs, - print, - blake2b, - random_bytes, - enable_contract_version, - add_session_version, - manage_message_topic, - emit_message, - cost_increase_per_message, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> HostFunctionCosts { - HostFunctionCosts { - read_value: rng.gen(), - dictionary_get: rng.gen(), - write: rng.gen(), - dictionary_put: rng.gen(), - add: rng.gen(), - new_uref: rng.gen(), - load_named_keys: rng.gen(), - ret: rng.gen(), - get_key: rng.gen(), - has_key: rng.gen(), - put_key: rng.gen(), - remove_key: rng.gen(), - revert: rng.gen(), - is_valid_uref: rng.gen(), - add_associated_key: rng.gen(), - remove_associated_key: rng.gen(), - update_associated_key: rng.gen(), - set_action_threshold: rng.gen(), - get_caller: rng.gen(), - get_blocktime: rng.gen(), - create_purse: rng.gen(), - transfer_to_account: rng.gen(), - transfer_from_purse_to_account: rng.gen(), - transfer_from_purse_to_purse: rng.gen(), - get_balance: rng.gen(), - get_phase: rng.gen(), - get_system_contract: rng.gen(), - get_main_purse: rng.gen(), - read_host_buffer: rng.gen(), - create_contract_package_at_hash: rng.gen(), - create_contract_user_group: rng.gen(), - add_contract_version: rng.gen(), - disable_contract_version: rng.gen(), - call_contract: rng.gen(), - call_versioned_contract: rng.gen(), - get_named_arg_size: rng.gen(), - get_named_arg: rng.gen(), - remove_contract_user_group: rng.gen(), - provision_contract_user_group_uref: rng.gen(), - remove_contract_user_group_urefs: rng.gen(), - print: rng.gen(), - blake2b: rng.gen(), - random_bytes: rng.gen(), - enable_contract_version: rng.gen(), - add_session_version: rng.gen(), - manage_message_topic: rng.gen(), - emit_message: rng.gen(), - cost_increase_per_message: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prelude::*}; - - use crate::{HostFunction, HostFunctionCost, HostFunctionCosts}; - - #[allow(unused)] - pub fn host_function_cost_arb() -> impl Strategy> { - (any::(), any::()) - .prop_map(|(cost, arguments)| HostFunction::new(cost, arguments)) - } - - prop_compose! 
{ - pub fn host_function_costs_arb() ( - read_value in host_function_cost_arb(), - dictionary_get in host_function_cost_arb(), - write in host_function_cost_arb(), - dictionary_put in host_function_cost_arb(), - add in host_function_cost_arb(), - new_uref in host_function_cost_arb(), - load_named_keys in host_function_cost_arb(), - ret in host_function_cost_arb(), - get_key in host_function_cost_arb(), - has_key in host_function_cost_arb(), - put_key in host_function_cost_arb(), - remove_key in host_function_cost_arb(), - revert in host_function_cost_arb(), - is_valid_uref in host_function_cost_arb(), - add_associated_key in host_function_cost_arb(), - remove_associated_key in host_function_cost_arb(), - update_associated_key in host_function_cost_arb(), - set_action_threshold in host_function_cost_arb(), - get_caller in host_function_cost_arb(), - get_blocktime in host_function_cost_arb(), - create_purse in host_function_cost_arb(), - transfer_to_account in host_function_cost_arb(), - transfer_from_purse_to_account in host_function_cost_arb(), - transfer_from_purse_to_purse in host_function_cost_arb(), - get_balance in host_function_cost_arb(), - get_phase in host_function_cost_arb(), - get_system_contract in host_function_cost_arb(), - get_main_purse in host_function_cost_arb(), - read_host_buffer in host_function_cost_arb(), - create_contract_package_at_hash in host_function_cost_arb(), - create_contract_user_group in host_function_cost_arb(), - add_contract_version in host_function_cost_arb(), - disable_contract_version in host_function_cost_arb(), - call_contract in host_function_cost_arb(), - call_versioned_contract in host_function_cost_arb(), - get_named_arg_size in host_function_cost_arb(), - get_named_arg in host_function_cost_arb(), - remove_contract_user_group in host_function_cost_arb(), - provision_contract_user_group_uref in host_function_cost_arb(), - remove_contract_user_group_urefs in host_function_cost_arb(), - print in host_function_cost_arb(), - blake2b in host_function_cost_arb(), - random_bytes in host_function_cost_arb(), - enable_contract_version in host_function_cost_arb(), - add_session_version in host_function_cost_arb(), - manage_message_topic in host_function_cost_arb(), - emit_message in host_function_cost_arb(), - cost_increase_per_message in num::u32::ANY, - ) -> HostFunctionCosts { - HostFunctionCosts { - read_value, - dictionary_get, - write, - dictionary_put, - add, - new_uref, - load_named_keys, - ret, - get_key, - has_key, - put_key, - remove_key, - revert, - is_valid_uref, - add_associated_key, - remove_associated_key, - update_associated_key, - set_action_threshold, - get_caller, - get_blocktime, - create_purse, - transfer_to_account, - transfer_from_purse_to_account, - transfer_from_purse_to_purse, - get_balance, - get_phase, - get_system_contract, - get_main_purse, - read_host_buffer, - create_contract_package_at_hash, - create_contract_user_group, - add_contract_version, - disable_contract_version, - call_contract, - call_versioned_contract, - get_named_arg_size, - get_named_arg, - remove_contract_user_group, - provision_contract_user_group_uref, - remove_contract_user_group_urefs, - print, - blake2b, - random_bytes, - enable_contract_version, - add_session_version, - manage_message_topic, - emit_message, - cost_increase_per_message, - } - } - } -} - -#[cfg(test)] -mod tests { - use crate::U512; - - use super::*; - - const COST: Cost = 42; - const ARGUMENT_COSTS: [Cost; 3] = [123, 456, 789]; - const WEIGHTS: [Cost; 3] = [1000, 1100, 1200]; - - 
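Before the deleted unit tests below, a worked reading of the calculate_gas_cost formula documented earlier in this file (gas = cost + sum of argument_weight_i * byte_size_i), using the default call_contract weights defined above. This is an illustrative sketch of the removed API, not behaviour introduced by the diff.

// Assumes HostFunction, Cost and Gas exactly as defined in the deleted module.
fn call_contract_gas_example() {
    // Only the runtime-args-size slot carries a weight (420) for call_contract.
    let call_contract: HostFunction<[Cost; 7]> =
        HostFunction::new(4_500, [0, 0, 0, 0, 0, 420, 0]);
    // Byte sizes observed for each argument slot; 1_000 bytes of serialized runtime args.
    let byte_sizes: [Cost; 7] = [0, 0, 0, 0, 0, 1_000, 0];
    let gas = call_contract.calculate_gas_cost(byte_sizes);
    // 4_500 + 420 * 1_000 = 424_500
    assert_eq!(gas, Gas::new(424_500u64.into()));
}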
#[test] - fn calculate_gas_cost_for_host_function() { - let host_function = HostFunction::new(COST, ARGUMENT_COSTS); - let expected_cost = COST - + (ARGUMENT_COSTS[0] * WEIGHTS[0]) - + (ARGUMENT_COSTS[1] * WEIGHTS[1]) - + (ARGUMENT_COSTS[2] * WEIGHTS[2]); - assert_eq!( - host_function.calculate_gas_cost(WEIGHTS), - Gas::new(expected_cost.into()) - ); - } - - #[test] - fn calculate_gas_cost_would_overflow() { - let large_value = Cost::max_value(); - - let host_function = HostFunction::new( - large_value, - [large_value, large_value, large_value, large_value], - ); - - let lhs = - host_function.calculate_gas_cost([large_value, large_value, large_value, large_value]); - - let large_value = U512::from(large_value); - let rhs = large_value + (U512::from(4) * large_value * large_value); - - assert_eq!(lhs, Gas::new(rhs)); - } -} - -#[cfg(test)] -mod proptests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::*; - - type Signature = [Cost; 10]; - - proptest! { - #[test] - fn test_host_function(host_function in gens::host_function_cost_arb::()) { - bytesrepr::test_serialization_roundtrip(&host_function); - } - - #[test] - fn test_host_function_costs(host_function_costs in gens::host_function_costs_arb()) { - bytesrepr::test_serialization_roundtrip(&host_function_costs); - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/message_limits.rs b/casper_types_ver_2_0/src/chainspec/vm_config/message_limits.rs deleted file mode 100644 index 93635153..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/message_limits.rs +++ /dev/null @@ -1,131 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Configuration for messages limits. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct MessageLimits { - /// Maximum size (in bytes) of a topic name string. - pub max_topic_name_size: u32, - /// Maximum message size in bytes. - pub max_message_size: u32, - /// Maximum number of topics that a contract can register. - pub max_topics_per_contract: u32, -} - -impl MessageLimits { - /// Returns the max number of topics a contract can register. - pub fn max_topics_per_contract(&self) -> u32 { - self.max_topics_per_contract - } - - /// Returns the maximum allowed size for the topic name string. - pub fn max_topic_name_size(&self) -> u32 { - self.max_topic_name_size - } - - /// Returns the maximum allowed size (in bytes) of the serialized message payload. 
- pub fn max_message_size(&self) -> u32 { - self.max_message_size - } -} - -impl Default for MessageLimits { - fn default() -> Self { - Self { - max_topic_name_size: 256, - max_message_size: 1024, - max_topics_per_contract: 128, - } - } -} - -impl ToBytes for MessageLimits { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut self.max_topic_name_size.to_bytes()?); - ret.append(&mut self.max_message_size.to_bytes()?); - ret.append(&mut self.max_topics_per_contract.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.max_topic_name_size.serialized_length() - + self.max_message_size.serialized_length() - + self.max_topics_per_contract.serialized_length() - } -} - -impl FromBytes for MessageLimits { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (max_topic_name_size, rem) = FromBytes::from_bytes(bytes)?; - let (max_message_size, rem) = FromBytes::from_bytes(rem)?; - let (max_topics_per_contract, rem) = FromBytes::from_bytes(rem)?; - - Ok(( - MessageLimits { - max_topic_name_size, - max_message_size, - max_topics_per_contract, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> MessageLimits { - MessageLimits { - max_topic_name_size: rng.gen(), - max_message_size: rng.gen(), - max_topics_per_contract: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::MessageLimits; - - prop_compose! { - pub fn message_limits_arb()( - max_topic_name_size in num::u32::ANY, - max_message_size in num::u32::ANY, - max_topics_per_contract in num::u32::ANY, - ) -> MessageLimits { - MessageLimits { - max_topic_name_size, - max_message_size, - max_topics_per_contract, - } - } - } -} - -#[cfg(test)] -mod tests { - use proptest::proptest; - - use crate::bytesrepr; - - use super::gens; - - proptest! { - #[test] - fn should_serialize_and_deserialize_with_arbitrary_values( - message_limits in gens::message_limits_arb() - ) { - bytesrepr::test_serialization_roundtrip(&message_limits); - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/mint_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/mint_costs.rs deleted file mode 100644 index 90f0d750..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/mint_costs.rs +++ /dev/null @@ -1,172 +0,0 @@ -//! Costs of the mint system contract. -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Default cost of the `mint` mint entry point. -pub const DEFAULT_MINT_COST: u32 = 2_500_000_000; -/// Default cost of the `reduce_total_supply` mint entry point. -pub const DEFAULT_REDUCE_TOTAL_SUPPLY_COST: u32 = 10_000; -/// Default cost of the `create` mint entry point. -pub const DEFAULT_CREATE_COST: u32 = 2_500_000_000; -/// Default cost of the `balance` mint entry point. -pub const DEFAULT_BALANCE_COST: u32 = 10_000; -/// Default cost of the `transfer` mint entry point. -pub const DEFAULT_TRANSFER_COST: u32 = 10_000; -/// Default cost of the `read_base_round_reward` mint entry point. -pub const DEFAULT_READ_BASE_ROUND_REWARD_COST: u32 = 10_000; -/// Default cost of the `mint_into_existing_purse` mint entry point. 
-pub const DEFAULT_MINT_INTO_EXISTING_PURSE_COST: u32 = 2_500_000_000; - -/// Description of the costs of calling mint entry points. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct MintCosts { - /// Cost of calling the `mint` entry point. - pub mint: u32, - /// Cost of calling the `reduce_total_supply` entry point. - pub reduce_total_supply: u32, - /// Cost of calling the `create` entry point. - pub create: u32, - /// Cost of calling the `balance` entry point. - pub balance: u32, - /// Cost of calling the `transfer` entry point. - pub transfer: u32, - /// Cost of calling the `read_base_round_reward` entry point. - pub read_base_round_reward: u32, - /// Cost of calling the `mint_into_existing_purse` entry point. - pub mint_into_existing_purse: u32, -} - -impl Default for MintCosts { - fn default() -> Self { - Self { - mint: DEFAULT_MINT_COST, - reduce_total_supply: DEFAULT_REDUCE_TOTAL_SUPPLY_COST, - create: DEFAULT_CREATE_COST, - balance: DEFAULT_BALANCE_COST, - transfer: DEFAULT_TRANSFER_COST, - read_base_round_reward: DEFAULT_READ_BASE_ROUND_REWARD_COST, - mint_into_existing_purse: DEFAULT_MINT_INTO_EXISTING_PURSE_COST, - } - } -} - -impl ToBytes for MintCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - let Self { - mint, - reduce_total_supply, - create, - balance, - transfer, - read_base_round_reward, - mint_into_existing_purse, - } = self; - - ret.append(&mut mint.to_bytes()?); - ret.append(&mut reduce_total_supply.to_bytes()?); - ret.append(&mut create.to_bytes()?); - ret.append(&mut balance.to_bytes()?); - ret.append(&mut transfer.to_bytes()?); - ret.append(&mut read_base_round_reward.to_bytes()?); - ret.append(&mut mint_into_existing_purse.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - let Self { - mint, - reduce_total_supply, - create, - balance, - transfer, - read_base_round_reward, - mint_into_existing_purse, - } = self; - - mint.serialized_length() - + reduce_total_supply.serialized_length() - + create.serialized_length() - + balance.serialized_length() - + transfer.serialized_length() - + read_base_round_reward.serialized_length() - + mint_into_existing_purse.serialized_length() - } -} - -impl FromBytes for MintCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (mint, rem) = FromBytes::from_bytes(bytes)?; - let (reduce_total_supply, rem) = FromBytes::from_bytes(rem)?; - let (create, rem) = FromBytes::from_bytes(rem)?; - let (balance, rem) = FromBytes::from_bytes(rem)?; - let (transfer, rem) = FromBytes::from_bytes(rem)?; - let (read_base_round_reward, rem) = FromBytes::from_bytes(rem)?; - let (mint_into_existing_purse, rem) = FromBytes::from_bytes(rem)?; - - Ok(( - Self { - mint, - reduce_total_supply, - create, - balance, - transfer, - read_base_round_reward, - mint_into_existing_purse, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> MintCosts { - MintCosts { - mint: rng.gen(), - reduce_total_supply: rng.gen(), - create: rng.gen(), - balance: rng.gen(), - transfer: rng.gen(), - read_base_round_reward: rng.gen(), - mint_into_existing_purse: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::MintCosts; - - prop_compose! 
{ - pub fn mint_costs_arb()( - mint in num::u32::ANY, - reduce_total_supply in num::u32::ANY, - create in num::u32::ANY, - balance in num::u32::ANY, - transfer in num::u32::ANY, - read_base_round_reward in num::u32::ANY, - mint_into_existing_purse in num::u32::ANY, - ) -> MintCosts { - MintCosts { - mint, - reduce_total_supply, - create, - balance, - transfer, - read_base_round_reward, - mint_into_existing_purse, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/opcode_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/opcode_costs.rs deleted file mode 100644 index 5ad8c49c..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/opcode_costs.rs +++ /dev/null @@ -1,773 +0,0 @@ -//! Support for Wasm opcode costs. - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use derive_more::Add; -use num_traits::Zero; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Default cost of the `bit` Wasm opcode. -pub const DEFAULT_BIT_COST: u32 = 300; -/// Default cost of the `add` Wasm opcode. -pub const DEFAULT_ADD_COST: u32 = 210; -/// Default cost of the `mul` Wasm opcode. -pub const DEFAULT_MUL_COST: u32 = 240; -/// Default cost of the `div` Wasm opcode. -pub const DEFAULT_DIV_COST: u32 = 320; -/// Default cost of the `load` Wasm opcode. -pub const DEFAULT_LOAD_COST: u32 = 2_500; -/// Default cost of the `store` Wasm opcode. -pub const DEFAULT_STORE_COST: u32 = 4_700; -/// Default cost of the `const` Wasm opcode. -pub const DEFAULT_CONST_COST: u32 = 110; -/// Default cost of the `local` Wasm opcode. -pub const DEFAULT_LOCAL_COST: u32 = 390; -/// Default cost of the `global` Wasm opcode. -pub const DEFAULT_GLOBAL_COST: u32 = 390; -/// Default cost of the `integer_comparison` Wasm opcode. -pub const DEFAULT_INTEGER_COMPARISON_COST: u32 = 250; -/// Default cost of the `conversion` Wasm opcode. -pub const DEFAULT_CONVERSION_COST: u32 = 420; -/// Default cost of the `unreachable` Wasm opcode. -pub const DEFAULT_UNREACHABLE_COST: u32 = 270; -/// Default cost of the `nop` Wasm opcode. -// TODO: This value is not researched. -pub const DEFAULT_NOP_COST: u32 = 200; -/// Default cost of the `current_memory` Wasm opcode. -pub const DEFAULT_CURRENT_MEMORY_COST: u32 = 290; -/// Default cost of the `grow_memory` Wasm opcode. -pub const DEFAULT_GROW_MEMORY_COST: u32 = 240_000; -/// Default cost of the `block` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_BLOCK_OPCODE: u32 = 440; -/// Default cost of the `loop` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_LOOP_OPCODE: u32 = 440; -/// Default cost of the `if` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_IF_OPCODE: u32 = 440; -/// Default cost of the `else` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_ELSE_OPCODE: u32 = 440; -/// Default cost of the `end` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_END_OPCODE: u32 = 440; -/// Default cost of the `br` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_BR_OPCODE: u32 = 35_000; -/// Default cost of the `br_if` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_BR_IF_OPCODE: u32 = 35_000; -/// Default cost of the `return` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_RETURN_OPCODE: u32 = 440; -/// Default cost of the `select` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_SELECT_OPCODE: u32 = 440; -/// Default cost of the `call` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_CALL_OPCODE: u32 = 68_000; -/// Default cost of the `call_indirect` Wasm opcode. 
-pub const DEFAULT_CONTROL_FLOW_CALL_INDIRECT_OPCODE: u32 = 68_000; -/// Default cost of the `drop` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_DROP_OPCODE: u32 = 440; -/// Default fixed cost of the `br_table` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_BR_TABLE_OPCODE: u32 = 35_000; -/// Default multiplier for the size of targets in `br_table` Wasm opcode. -pub const DEFAULT_CONTROL_FLOW_BR_TABLE_MULTIPLIER: u32 = 100; - -/// Definition of a cost table for a Wasm `br_table` opcode. -/// -/// Charge of a `br_table` opcode is calculated as follows: -/// -/// ```text -/// cost + (len(br_table.targets) * size_multiplier) -/// ``` -// This is done to encourage users to avoid writing code with very long `br_table`s. -#[derive(Add, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct BrTableCost { - /// Fixed cost charge for `br_table` opcode. - pub cost: u32, - /// Multiplier for size of target labels in the `br_table` opcode. - pub size_multiplier: u32, -} - -impl Default for BrTableCost { - fn default() -> Self { - Self { - cost: DEFAULT_CONTROL_FLOW_BR_TABLE_OPCODE, - size_multiplier: DEFAULT_CONTROL_FLOW_BR_TABLE_MULTIPLIER, - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> BrTableCost { - BrTableCost { - cost: rng.gen(), - size_multiplier: rng.gen(), - } - } -} - -impl ToBytes for BrTableCost { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let Self { - cost, - size_multiplier, - } = self; - - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut cost.to_bytes()?); - ret.append(&mut size_multiplier.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - let Self { - cost, - size_multiplier, - } = self; - - cost.serialized_length() + size_multiplier.serialized_length() - } -} - -impl FromBytes for BrTableCost { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (cost, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (size_multiplier, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - Ok(( - Self { - cost, - size_multiplier, - }, - bytes, - )) - } -} - -impl Zero for BrTableCost { - fn zero() -> Self { - BrTableCost { - cost: 0, - size_multiplier: 0, - } - } - - fn is_zero(&self) -> bool { - let BrTableCost { - cost, - size_multiplier, - } = self; - cost.is_zero() && size_multiplier.is_zero() - } -} - -/// Definition of a cost table for a Wasm control flow opcodes. -#[derive(Add, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct ControlFlowCosts { - /// Cost for `block` opcode. - pub block: u32, - /// Cost for `loop` opcode. - #[serde(rename = "loop")] - pub op_loop: u32, - /// Cost for `if` opcode. - #[serde(rename = "if")] - pub op_if: u32, - /// Cost for `else` opcode. - #[serde(rename = "else")] - pub op_else: u32, - /// Cost for `end` opcode. - pub end: u32, - /// Cost for `br` opcode. - pub br: u32, - /// Cost for `br_if` opcode. - pub br_if: u32, - /// Cost for `return` opcode. - #[serde(rename = "return")] - pub op_return: u32, - /// Cost for `call` opcode. - pub call: u32, - /// Cost for `call_indirect` opcode. - pub call_indirect: u32, - /// Cost for `drop` opcode. - pub drop: u32, - /// Cost for `select` opcode. - pub select: u32, - /// Cost for `br_table` opcode. 
- pub br_table: BrTableCost, -} - -impl Default for ControlFlowCosts { - fn default() -> Self { - Self { - block: DEFAULT_CONTROL_FLOW_BLOCK_OPCODE, - op_loop: DEFAULT_CONTROL_FLOW_LOOP_OPCODE, - op_if: DEFAULT_CONTROL_FLOW_IF_OPCODE, - op_else: DEFAULT_CONTROL_FLOW_ELSE_OPCODE, - end: DEFAULT_CONTROL_FLOW_END_OPCODE, - br: DEFAULT_CONTROL_FLOW_BR_OPCODE, - br_if: DEFAULT_CONTROL_FLOW_BR_IF_OPCODE, - op_return: DEFAULT_CONTROL_FLOW_RETURN_OPCODE, - call: DEFAULT_CONTROL_FLOW_CALL_OPCODE, - call_indirect: DEFAULT_CONTROL_FLOW_CALL_INDIRECT_OPCODE, - drop: DEFAULT_CONTROL_FLOW_DROP_OPCODE, - select: DEFAULT_CONTROL_FLOW_SELECT_OPCODE, - br_table: Default::default(), - } - } -} - -impl ToBytes for ControlFlowCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - let Self { - block, - op_loop, - op_if, - op_else, - end, - br, - br_if, - op_return, - call, - call_indirect, - drop, - select, - br_table, - } = self; - ret.append(&mut block.to_bytes()?); - ret.append(&mut op_loop.to_bytes()?); - ret.append(&mut op_if.to_bytes()?); - ret.append(&mut op_else.to_bytes()?); - ret.append(&mut end.to_bytes()?); - ret.append(&mut br.to_bytes()?); - ret.append(&mut br_if.to_bytes()?); - ret.append(&mut op_return.to_bytes()?); - ret.append(&mut call.to_bytes()?); - ret.append(&mut call_indirect.to_bytes()?); - ret.append(&mut drop.to_bytes()?); - ret.append(&mut select.to_bytes()?); - ret.append(&mut br_table.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - let Self { - block, - op_loop, - op_if, - op_else, - end, - br, - br_if, - op_return, - call, - call_indirect, - drop, - select, - br_table, - } = self; - block.serialized_length() - + op_loop.serialized_length() - + op_if.serialized_length() - + op_else.serialized_length() - + end.serialized_length() - + br.serialized_length() - + br_if.serialized_length() - + op_return.serialized_length() - + call.serialized_length() - + call_indirect.serialized_length() - + drop.serialized_length() - + select.serialized_length() - + br_table.serialized_length() - } -} - -impl FromBytes for ControlFlowCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (block, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (op_loop, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (op_if, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (op_else, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (end, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (br, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (br_if, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (op_return, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (call, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (call_indirect, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (drop, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (select, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (br_table, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - - let control_flow_cost = ControlFlowCosts { - block, - op_loop, - op_if, - op_else, - end, - br, - br_if, - op_return, - call, - call_indirect, - drop, - select, - br_table, - }; - Ok((control_flow_cost, bytes)) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ControlFlowCosts { - ControlFlowCosts { - block: rng.gen(), - op_loop: rng.gen(), - op_if: rng.gen(), - op_else: rng.gen(), - end: rng.gen(), - br: 
rng.gen(), - br_if: rng.gen(), - op_return: rng.gen(), - call: rng.gen(), - call_indirect: rng.gen(), - drop: rng.gen(), - select: rng.gen(), - br_table: rng.gen(), - } - } -} - -impl Zero for ControlFlowCosts { - fn zero() -> Self { - ControlFlowCosts { - block: 0, - op_loop: 0, - op_if: 0, - op_else: 0, - end: 0, - br: 0, - br_if: 0, - op_return: 0, - call: 0, - call_indirect: 0, - drop: 0, - select: 0, - br_table: BrTableCost::zero(), - } - } - - fn is_zero(&self) -> bool { - let ControlFlowCosts { - block, - op_loop, - op_if, - op_else, - end, - br, - br_if, - op_return, - call, - call_indirect, - drop, - select, - br_table, - } = self; - block.is_zero() - && op_loop.is_zero() - && op_if.is_zero() - && op_else.is_zero() - && end.is_zero() - && br.is_zero() - && br_if.is_zero() - && op_return.is_zero() - && call.is_zero() - && call_indirect.is_zero() - && drop.is_zero() - && select.is_zero() - && br_table.is_zero() - } -} - -/// Definition of a cost table for Wasm opcodes. -/// -/// This is taken (partially) from parity-ethereum. -#[derive(Add, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct OpcodeCosts { - /// Bit operations multiplier. - pub bit: u32, - /// Arithmetic add operations multiplier. - pub add: u32, - /// Mul operations multiplier. - pub mul: u32, - /// Div operations multiplier. - pub div: u32, - /// Memory load operation multiplier. - pub load: u32, - /// Memory store operation multiplier. - pub store: u32, - /// Const operation multiplier. - #[serde(rename = "const")] - pub op_const: u32, - /// Local operations multiplier. - pub local: u32, - /// Global operations multiplier. - pub global: u32, - /// Integer operations multiplier. - pub integer_comparison: u32, - /// Conversion operations multiplier. - pub conversion: u32, - /// Unreachable operation multiplier. - pub unreachable: u32, - /// Nop operation multiplier. - pub nop: u32, - /// Get current memory operation multiplier. - pub current_memory: u32, - /// Grow memory cost, per page (64kb) - pub grow_memory: u32, - /// Control flow operations multiplier. 
- pub control_flow: ControlFlowCosts, -} - -impl Default for OpcodeCosts { - fn default() -> Self { - OpcodeCosts { - bit: DEFAULT_BIT_COST, - add: DEFAULT_ADD_COST, - mul: DEFAULT_MUL_COST, - div: DEFAULT_DIV_COST, - load: DEFAULT_LOAD_COST, - store: DEFAULT_STORE_COST, - op_const: DEFAULT_CONST_COST, - local: DEFAULT_LOCAL_COST, - global: DEFAULT_GLOBAL_COST, - integer_comparison: DEFAULT_INTEGER_COMPARISON_COST, - conversion: DEFAULT_CONVERSION_COST, - unreachable: DEFAULT_UNREACHABLE_COST, - nop: DEFAULT_NOP_COST, - current_memory: DEFAULT_CURRENT_MEMORY_COST, - grow_memory: DEFAULT_GROW_MEMORY_COST, - control_flow: ControlFlowCosts::default(), - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> OpcodeCosts { - OpcodeCosts { - bit: rng.gen(), - add: rng.gen(), - mul: rng.gen(), - div: rng.gen(), - load: rng.gen(), - store: rng.gen(), - op_const: rng.gen(), - local: rng.gen(), - global: rng.gen(), - integer_comparison: rng.gen(), - conversion: rng.gen(), - unreachable: rng.gen(), - nop: rng.gen(), - current_memory: rng.gen(), - grow_memory: rng.gen(), - control_flow: rng.gen(), - } - } -} - -impl ToBytes for OpcodeCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - let Self { - bit, - add, - mul, - div, - load, - store, - op_const, - local, - global, - integer_comparison, - conversion, - unreachable, - nop, - current_memory, - grow_memory, - control_flow, - } = self; - - ret.append(&mut bit.to_bytes()?); - ret.append(&mut add.to_bytes()?); - ret.append(&mut mul.to_bytes()?); - ret.append(&mut div.to_bytes()?); - ret.append(&mut load.to_bytes()?); - ret.append(&mut store.to_bytes()?); - ret.append(&mut op_const.to_bytes()?); - ret.append(&mut local.to_bytes()?); - ret.append(&mut global.to_bytes()?); - ret.append(&mut integer_comparison.to_bytes()?); - ret.append(&mut conversion.to_bytes()?); - ret.append(&mut unreachable.to_bytes()?); - ret.append(&mut nop.to_bytes()?); - ret.append(&mut current_memory.to_bytes()?); - ret.append(&mut grow_memory.to_bytes()?); - ret.append(&mut control_flow.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - let Self { - bit, - add, - mul, - div, - load, - store, - op_const, - local, - global, - integer_comparison, - conversion, - unreachable, - nop, - current_memory, - grow_memory, - control_flow, - } = self; - bit.serialized_length() - + add.serialized_length() - + mul.serialized_length() - + div.serialized_length() - + load.serialized_length() - + store.serialized_length() - + op_const.serialized_length() - + local.serialized_length() - + global.serialized_length() - + integer_comparison.serialized_length() - + conversion.serialized_length() - + unreachable.serialized_length() - + nop.serialized_length() - + current_memory.serialized_length() - + grow_memory.serialized_length() - + control_flow.serialized_length() - } -} - -impl FromBytes for OpcodeCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bit, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (add, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (mul, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (div, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (load, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (store, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (const_, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (local, bytes): (_, &[u8]) = 
FromBytes::from_bytes(bytes)?; - let (global, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (integer_comparison, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (conversion, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (unreachable, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (nop, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (current_memory, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (grow_memory, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - let (control_flow, bytes): (_, &[u8]) = FromBytes::from_bytes(bytes)?; - - let opcode_costs = OpcodeCosts { - bit, - add, - mul, - div, - load, - store, - op_const: const_, - local, - global, - integer_comparison, - conversion, - unreachable, - nop, - current_memory, - grow_memory, - control_flow, - }; - Ok((opcode_costs, bytes)) - } -} - -impl Zero for OpcodeCosts { - fn zero() -> Self { - Self { - bit: 0, - add: 0, - mul: 0, - div: 0, - load: 0, - store: 0, - op_const: 0, - local: 0, - global: 0, - integer_comparison: 0, - conversion: 0, - unreachable: 0, - nop: 0, - current_memory: 0, - grow_memory: 0, - control_flow: ControlFlowCosts::zero(), - } - } - - fn is_zero(&self) -> bool { - let OpcodeCosts { - bit, - add, - mul, - div, - load, - store, - op_const, - local, - global, - integer_comparison, - conversion, - unreachable, - nop, - current_memory, - grow_memory, - control_flow, - } = self; - bit.is_zero() - && add.is_zero() - && mul.is_zero() - && div.is_zero() - && load.is_zero() - && store.is_zero() - && op_const.is_zero() - && local.is_zero() - && global.is_zero() - && integer_comparison.is_zero() - && conversion.is_zero() - && unreachable.is_zero() - && nop.is_zero() - && current_memory.is_zero() - && grow_memory.is_zero() - && control_flow.is_zero() - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use crate::{BrTableCost, ControlFlowCosts, OpcodeCosts}; - - prop_compose! { - pub fn br_table_cost_arb()( - cost in num::u32::ANY, - size_multiplier in num::u32::ANY, - ) -> BrTableCost { - BrTableCost { cost, size_multiplier } - } - } - - prop_compose! { - pub fn control_flow_cost_arb()( - block in num::u32::ANY, - op_loop in num::u32::ANY, - op_if in num::u32::ANY, - op_else in num::u32::ANY, - end in num::u32::ANY, - br in num::u32::ANY, - br_if in num::u32::ANY, - br_table in br_table_cost_arb(), - op_return in num::u32::ANY, - call in num::u32::ANY, - call_indirect in num::u32::ANY, - drop in num::u32::ANY, - select in num::u32::ANY, - ) -> ControlFlowCosts { - ControlFlowCosts { - block, - op_loop, - op_if, - op_else, - end, - br, - br_if, - br_table, - op_return, - call, - call_indirect, - drop, - select - } - } - - } - - prop_compose! 
{ - pub fn opcode_costs_arb()( - bit in num::u32::ANY, - add in num::u32::ANY, - mul in num::u32::ANY, - div in num::u32::ANY, - load in num::u32::ANY, - store in num::u32::ANY, - op_const in num::u32::ANY, - local in num::u32::ANY, - global in num::u32::ANY, - integer_comparison in num::u32::ANY, - conversion in num::u32::ANY, - unreachable in num::u32::ANY, - nop in num::u32::ANY, - current_memory in num::u32::ANY, - grow_memory in num::u32::ANY, - control_flow in control_flow_cost_arb(), - ) -> OpcodeCosts { - OpcodeCosts { - bit, - add, - mul, - div, - load, - store, - op_const, - local, - global, - integer_comparison, - conversion, - unreachable, - nop, - current_memory, - grow_memory, - control_flow, - } - } - } -} - -#[cfg(test)] -mod tests { - use proptest::proptest; - - use crate::bytesrepr; - - use super::gens; - - proptest! { - #[test] - fn should_serialize_and_deserialize_with_arbitrary_values( - opcode_costs in gens::opcode_costs_arb() - ) { - bytesrepr::test_serialization_roundtrip(&opcode_costs); - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/standard_payment_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/standard_payment_costs.rs deleted file mode 100644 index 618f7d66..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/standard_payment_costs.rs +++ /dev/null @@ -1,70 +0,0 @@ -//! Costs of the standard payment system contract. -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// Default cost of the `pay` standard payment entry point. -const DEFAULT_PAY_COST: u32 = 10_000; - -/// Description of the costs of calling standard payment entry points. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct StandardPaymentCosts { - /// Cost of calling the `pay` entry point. - pub pay: u32, -} - -impl Default for StandardPaymentCosts { - fn default() -> Self { - Self { - pay: DEFAULT_PAY_COST, - } - } -} - -impl ToBytes for StandardPaymentCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.pay.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.pay.serialized_length() - } -} - -impl FromBytes for StandardPaymentCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (pay, rem) = FromBytes::from_bytes(bytes)?; - Ok((Self { pay }, rem)) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> StandardPaymentCosts { - StandardPaymentCosts { pay: rng.gen() } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::StandardPaymentCosts; - - prop_compose! { - pub fn standard_payment_costs_arb()( - pay in num::u32::ANY, - ) -> StandardPaymentCosts { - StandardPaymentCosts { - pay, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/storage_costs.rs b/casper_types_ver_2_0/src/chainspec/vm_config/storage_costs.rs deleted file mode 100644 index 0ce4e9ce..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/storage_costs.rs +++ /dev/null @@ -1,138 +0,0 @@ -//! Support for storage costs. 
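Before the storage costs below, a short illustration of the opcode-cost table whose removal the hunk above records. A minimal sketch (not part of the original diff) of the same bytesrepr round trip the proptest above performs, assuming the pre-removal API with the crate imported as `casper_types` and `OpcodeCosts` re-exported at the root, as in these sources:

use casper_types::{
    bytesrepr::{FromBytes, ToBytes},
    OpcodeCosts,
};

fn main() {
    // Default multipliers for each Wasm opcode family (bit, add, mul, div, ...).
    let costs = OpcodeCosts::default();

    // Serialize the fields in the fixed order used by `ToBytes for OpcodeCosts`.
    let bytes = costs.to_bytes().expect("serializing default opcode costs");

    // Deserializing consumes the whole buffer and yields the same table.
    let (decoded, remainder) =
        OpcodeCosts::from_bytes(&bytes).expect("deserializing opcode costs");
    assert_eq!(costs, decoded);
    assert!(remainder.is_empty());
}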
-#[cfg(feature = "datasize")] -use datasize::DataSize; -use derive_more::Add; -use num_traits::Zero; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Gas, U512, -}; - -/// Default gas cost per byte stored. -pub const DEFAULT_GAS_PER_BYTE_COST: u32 = 630_000; - -/// Represents a cost table for storage costs. -#[derive(Add, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct StorageCosts { - /// Gas charged per byte stored in the global state. - gas_per_byte: u32, -} - -impl StorageCosts { - /// Creates new `StorageCosts`. - pub const fn new(gas_per_byte: u32) -> Self { - Self { gas_per_byte } - } - - /// Returns amount of gas per byte stored. - pub fn gas_per_byte(&self) -> u32 { - self.gas_per_byte - } - - /// Calculates gas cost for storing `bytes`. - pub fn calculate_gas_cost(&self, bytes: usize) -> Gas { - let value = U512::from(self.gas_per_byte) * U512::from(bytes); - Gas::new(value) - } -} - -impl Default for StorageCosts { - fn default() -> Self { - Self { - gas_per_byte: DEFAULT_GAS_PER_BYTE_COST, - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> StorageCosts { - StorageCosts { - gas_per_byte: rng.gen(), - } - } -} - -impl ToBytes for StorageCosts { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut self.gas_per_byte.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.gas_per_byte.serialized_length() - } -} - -impl FromBytes for StorageCosts { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (gas_per_byte, rem) = FromBytes::from_bytes(bytes)?; - - Ok((StorageCosts { gas_per_byte }, rem)) - } -} - -impl Zero for StorageCosts { - fn zero() -> Self { - StorageCosts { gas_per_byte: 0 } - } - - fn is_zero(&self) -> bool { - self.gas_per_byte.is_zero() - } -} - -#[cfg(test)] -pub mod tests { - use crate::U512; - - use super::*; - - const SMALL_WEIGHT: usize = 123456789; - const LARGE_WEIGHT: usize = usize::max_value(); - - #[test] - fn should_calculate_gas_cost() { - let storage_costs = StorageCosts::default(); - - let cost = storage_costs.calculate_gas_cost(SMALL_WEIGHT); - - let expected_cost = U512::from(DEFAULT_GAS_PER_BYTE_COST) * U512::from(SMALL_WEIGHT); - assert_eq!(cost, Gas::new(expected_cost)); - } - - #[test] - fn should_calculate_big_gas_cost() { - let storage_costs = StorageCosts::default(); - - let cost = storage_costs.calculate_gas_cost(LARGE_WEIGHT); - - let expected_cost = U512::from(DEFAULT_GAS_PER_BYTE_COST) * U512::from(LARGE_WEIGHT); - assert_eq!(cost, Gas::new(expected_cost)); - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use super::StorageCosts; - - prop_compose! 
{ - pub fn storage_costs_arb()( - gas_per_byte in num::u32::ANY, - ) -> StorageCosts { - StorageCosts { - gas_per_byte, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/system_config.rs b/casper_types_ver_2_0/src/chainspec/vm_config/system_config.rs deleted file mode 100644 index d6f61677..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/system_config.rs +++ /dev/null @@ -1,179 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - chainspec::vm_config::{AuctionCosts, HandlePaymentCosts, MintCosts, StandardPaymentCosts}, -}; - -/// Default gas cost for a wasmless transfer. -pub const DEFAULT_WASMLESS_TRANSFER_COST: u32 = 100_000_000; - -/// Definition of costs in the system. -/// -/// This structure contains the costs of all the system contract's entry points and, additionally, -/// it defines a wasmless transfer cost. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct SystemConfig { - /// Wasmless transfer cost expressed in gas. - wasmless_transfer_cost: u32, - - /// Configuration of auction entrypoint costs. - auction_costs: AuctionCosts, - - /// Configuration of mint entrypoint costs. - mint_costs: MintCosts, - - /// Configuration of handle payment entrypoint costs. - handle_payment_costs: HandlePaymentCosts, - - /// Configuration of standard payment costs. - standard_payment_costs: StandardPaymentCosts, -} - -impl SystemConfig { - /// Creates new system config instance. - pub fn new( - wasmless_transfer_cost: u32, - auction_costs: AuctionCosts, - mint_costs: MintCosts, - handle_payment_costs: HandlePaymentCosts, - standard_payment_costs: StandardPaymentCosts, - ) -> Self { - Self { - wasmless_transfer_cost, - auction_costs, - mint_costs, - handle_payment_costs, - standard_payment_costs, - } - } - - /// Returns wasmless transfer cost. - pub fn wasmless_transfer_cost(&self) -> u32 { - self.wasmless_transfer_cost - } - - /// Returns the costs of executing auction entry points. - pub fn auction_costs(&self) -> &AuctionCosts { - &self.auction_costs - } - - /// Returns the costs of executing mint entry points. - pub fn mint_costs(&self) -> &MintCosts { - &self.mint_costs - } - - /// Returns the costs of executing `handle_payment` entry points. - pub fn handle_payment_costs(&self) -> &HandlePaymentCosts { - &self.handle_payment_costs - } - - /// Returns the costs of executing `standard_payment` entry points. 
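For the StorageCosts table deleted above, the charge is simply gas_per_byte multiplied by the number of bytes written. A minimal worked sketch at the default rate of 630_000 gas per byte (not part of the original diff; `casper_types` import paths assumed):

use casper_types::{Gas, StorageCosts, U512};

fn main() {
    let storage_costs = StorageCosts::default(); // 630_000 gas per byte

    // Writing a 1 KiB value: 630_000 * 1_024 = 645_120_000 gas.
    let cost = storage_costs.calculate_gas_cost(1_024);
    assert_eq!(cost, Gas::new(U512::from(645_120_000u64)));
}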
- pub fn standard_payment_costs(&self) -> &StandardPaymentCosts { - &self.standard_payment_costs - } -} - -impl Default for SystemConfig { - fn default() -> Self { - Self { - wasmless_transfer_cost: DEFAULT_WASMLESS_TRANSFER_COST, - auction_costs: AuctionCosts::default(), - mint_costs: MintCosts::default(), - handle_payment_costs: HandlePaymentCosts::default(), - standard_payment_costs: StandardPaymentCosts::default(), - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> SystemConfig { - SystemConfig { - wasmless_transfer_cost: rng.gen(), - auction_costs: rng.gen(), - mint_costs: rng.gen(), - handle_payment_costs: rng.gen(), - standard_payment_costs: rng.gen(), - } - } -} - -impl ToBytes for SystemConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut self.wasmless_transfer_cost.to_bytes()?); - ret.append(&mut self.auction_costs.to_bytes()?); - ret.append(&mut self.mint_costs.to_bytes()?); - ret.append(&mut self.handle_payment_costs.to_bytes()?); - ret.append(&mut self.standard_payment_costs.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.wasmless_transfer_cost.serialized_length() - + self.auction_costs.serialized_length() - + self.mint_costs.serialized_length() - + self.handle_payment_costs.serialized_length() - + self.standard_payment_costs.serialized_length() - } -} - -impl FromBytes for SystemConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (wasmless_transfer_cost, rem) = FromBytes::from_bytes(bytes)?; - let (auction_costs, rem) = FromBytes::from_bytes(rem)?; - let (mint_costs, rem) = FromBytes::from_bytes(rem)?; - let (handle_payment_costs, rem) = FromBytes::from_bytes(rem)?; - let (standard_payment_costs, rem) = FromBytes::from_bytes(rem)?; - Ok(( - SystemConfig::new( - wasmless_transfer_cost, - auction_costs, - mint_costs, - handle_payment_costs, - standard_payment_costs, - ), - rem, - )) - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use crate::{ - chainspec::vm_config::{ - auction_costs::gens::auction_costs_arb, - handle_payment_costs::gens::handle_payment_costs_arb, mint_costs::gens::mint_costs_arb, - standard_payment_costs::gens::standard_payment_costs_arb, - }, - SystemConfig, - }; - - prop_compose! { - pub fn system_config_arb()( - wasmless_transfer_cost in num::u32::ANY, - auction_costs in auction_costs_arb(), - mint_costs in mint_costs_arb(), - handle_payment_costs in handle_payment_costs_arb(), - standard_payment_costs in standard_payment_costs_arb(), - ) -> SystemConfig { - SystemConfig { - wasmless_transfer_cost, - auction_costs, - mint_costs, - handle_payment_costs, - standard_payment_costs, - } - } - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/upgrade_config.rs b/casper_types_ver_2_0/src/chainspec/vm_config/upgrade_config.rs deleted file mode 100644 index 21e2150a..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/upgrade_config.rs +++ /dev/null @@ -1,112 +0,0 @@ -use num_rational::Ratio; -use std::collections::BTreeMap; - -use crate::{ChainspecRegistry, Digest, EraId, Key, ProtocolVersion, StoredValue}; - -/// Represents the configuration of a protocol upgrade. 
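The SystemConfig removed above bundles the wasmless transfer cost with the per-entry-point cost tables of the system contracts. A minimal sketch of reading its defaults (not part of the original diff; `casper_types` import path assumed):

use casper_types::SystemConfig;

fn main() {
    let system_config = SystemConfig::default();

    // Default wasmless transfer cost, per DEFAULT_WASMLESS_TRANSFER_COST above.
    assert_eq!(system_config.wasmless_transfer_cost(), 100_000_000);

    // The individual cost tables are exposed through getters.
    let _auction = system_config.auction_costs();
    let _mint = system_config.mint_costs();
    let _handle_payment = system_config.handle_payment_costs();
}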
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct UpgradeConfig { - pre_state_hash: Digest, - current_protocol_version: ProtocolVersion, - new_protocol_version: ProtocolVersion, - activation_point: Option, - new_validator_slots: Option, - new_auction_delay: Option, - new_locked_funds_period_millis: Option, - new_round_seigniorage_rate: Option>, - new_unbonding_delay: Option, - global_state_update: BTreeMap, - chainspec_registry: ChainspecRegistry, -} - -impl UpgradeConfig { - /// Create new upgrade config. - #[allow(clippy::too_many_arguments)] - pub fn new( - pre_state_hash: Digest, - current_protocol_version: ProtocolVersion, - new_protocol_version: ProtocolVersion, - activation_point: Option, - new_validator_slots: Option, - new_auction_delay: Option, - new_locked_funds_period_millis: Option, - new_round_seigniorage_rate: Option>, - new_unbonding_delay: Option, - global_state_update: BTreeMap, - chainspec_registry: ChainspecRegistry, - ) -> Self { - UpgradeConfig { - pre_state_hash, - current_protocol_version, - new_protocol_version, - activation_point, - new_validator_slots, - new_auction_delay, - new_locked_funds_period_millis, - new_round_seigniorage_rate, - new_unbonding_delay, - global_state_update, - chainspec_registry, - } - } - - /// Returns the current state root state hash - pub fn pre_state_hash(&self) -> Digest { - self.pre_state_hash - } - - /// Returns current protocol version of this upgrade. - pub fn current_protocol_version(&self) -> ProtocolVersion { - self.current_protocol_version - } - - /// Returns new protocol version of this upgrade. - pub fn new_protocol_version(&self) -> ProtocolVersion { - self.new_protocol_version - } - - /// Returns activation point in eras. - pub fn activation_point(&self) -> Option { - self.activation_point - } - - /// Returns new validator slots if specified. - pub fn new_validator_slots(&self) -> Option { - self.new_validator_slots - } - - /// Returns new auction delay if specified. - pub fn new_auction_delay(&self) -> Option { - self.new_auction_delay - } - - /// Returns new locked funds period if specified. - pub fn new_locked_funds_period_millis(&self) -> Option { - self.new_locked_funds_period_millis - } - - /// Returns new round seigniorage rate if specified. - pub fn new_round_seigniorage_rate(&self) -> Option> { - self.new_round_seigniorage_rate - } - - /// Returns new unbonding delay if specified. - pub fn new_unbonding_delay(&self) -> Option { - self.new_unbonding_delay - } - - /// Returns new map of emergency global state updates. - pub fn global_state_update(&self) -> &BTreeMap { - &self.global_state_update - } - - /// Returns a reference to the chainspec registry. - pub fn chainspec_registry(&self) -> &ChainspecRegistry { - &self.chainspec_registry - } - - /// Sets new pre state hash. - pub fn with_pre_state_hash(&mut self, pre_state_hash: Digest) { - self.pre_state_hash = pre_state_hash; - } -} diff --git a/casper_types_ver_2_0/src/chainspec/vm_config/wasm_config.rs b/casper_types_ver_2_0/src/chainspec/vm_config/wasm_config.rs deleted file mode 100644 index ab73b44b..00000000 --- a/casper_types_ver_2_0/src/chainspec/vm_config/wasm_config.rs +++ /dev/null @@ -1,186 +0,0 @@ -//! Configuration of the Wasm execution engine. 
-#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{distributions::Standard, prelude::*, Rng}; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - chainspec::vm_config::{HostFunctionCosts, MessageLimits, OpcodeCosts, StorageCosts}, -}; - -/// Default maximum number of pages of the Wasm memory. -pub const DEFAULT_WASM_MAX_MEMORY: u32 = 64; -/// Default maximum stack height. -pub const DEFAULT_MAX_STACK_HEIGHT: u32 = 500; - -/// Configuration of the Wasm execution environment. -/// -/// This structure contains various Wasm execution configuration options, such as memory limits, -/// stack limits and costs. -#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct WasmConfig { - /// Maximum amount of heap memory (represented in 64kB pages) each contract can use. - pub max_memory: u32, - /// Max stack height (native WebAssembly stack limiter). - pub max_stack_height: u32, - /// Wasm opcode costs table. - opcode_costs: OpcodeCosts, - /// Storage costs. - storage_costs: StorageCosts, - /// Host function costs table. - host_function_costs: HostFunctionCosts, - /// Messages limits. - messages_limits: MessageLimits, -} - -impl WasmConfig { - /// Creates new Wasm config. - pub const fn new( - max_memory: u32, - max_stack_height: u32, - opcode_costs: OpcodeCosts, - storage_costs: StorageCosts, - host_function_costs: HostFunctionCosts, - messages_limits: MessageLimits, - ) -> Self { - Self { - max_memory, - max_stack_height, - opcode_costs, - storage_costs, - host_function_costs, - messages_limits, - } - } - - /// Returns opcode costs. - pub fn opcode_costs(&self) -> OpcodeCosts { - self.opcode_costs - } - - /// Returns storage costs. - pub fn storage_costs(&self) -> StorageCosts { - self.storage_costs - } - - /// Returns host function costs and consumes this object. - pub fn take_host_function_costs(self) -> HostFunctionCosts { - self.host_function_costs - } - - /// Returns the limits config for messages. 
- pub fn messages_limits(&self) -> MessageLimits { - self.messages_limits - } -} - -impl Default for WasmConfig { - fn default() -> Self { - Self { - max_memory: DEFAULT_WASM_MAX_MEMORY, - max_stack_height: DEFAULT_MAX_STACK_HEIGHT, - opcode_costs: OpcodeCosts::default(), - storage_costs: StorageCosts::default(), - host_function_costs: HostFunctionCosts::default(), - messages_limits: MessageLimits::default(), - } - } -} - -impl ToBytes for WasmConfig { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - - ret.append(&mut self.max_memory.to_bytes()?); - ret.append(&mut self.max_stack_height.to_bytes()?); - ret.append(&mut self.opcode_costs.to_bytes()?); - ret.append(&mut self.storage_costs.to_bytes()?); - ret.append(&mut self.host_function_costs.to_bytes()?); - ret.append(&mut self.messages_limits.to_bytes()?); - - Ok(ret) - } - - fn serialized_length(&self) -> usize { - self.max_memory.serialized_length() - + self.max_stack_height.serialized_length() - + self.opcode_costs.serialized_length() - + self.storage_costs.serialized_length() - + self.host_function_costs.serialized_length() - + self.messages_limits.serialized_length() - } -} - -impl FromBytes for WasmConfig { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (max_memory, rem) = FromBytes::from_bytes(bytes)?; - let (max_stack_height, rem) = FromBytes::from_bytes(rem)?; - let (opcode_costs, rem) = FromBytes::from_bytes(rem)?; - let (storage_costs, rem) = FromBytes::from_bytes(rem)?; - let (host_function_costs, rem) = FromBytes::from_bytes(rem)?; - let (messages_limits, rem) = FromBytes::from_bytes(rem)?; - - Ok(( - WasmConfig { - max_memory, - max_stack_height, - opcode_costs, - storage_costs, - host_function_costs, - messages_limits, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> WasmConfig { - WasmConfig { - max_memory: rng.gen(), - max_stack_height: rng.gen(), - opcode_costs: rng.gen(), - storage_costs: rng.gen(), - host_function_costs: rng.gen(), - messages_limits: rng.gen(), - } - } -} - -#[doc(hidden)] -#[cfg(any(feature = "gens", test))] -pub mod gens { - use proptest::{num, prop_compose}; - - use crate::{ - chainspec::vm_config::{ - host_function_costs::gens::host_function_costs_arb, - message_limits::gens::message_limits_arb, opcode_costs::gens::opcode_costs_arb, - storage_costs::gens::storage_costs_arb, - }, - WasmConfig, - }; - - prop_compose! { - pub fn wasm_config_arb() ( - max_memory in num::u32::ANY, - max_stack_height in num::u32::ANY, - opcode_costs in opcode_costs_arb(), - storage_costs in storage_costs_arb(), - host_function_costs in host_function_costs_arb(), - messages_limits in message_limits_arb(), - ) -> WasmConfig { - WasmConfig { - max_memory, - max_stack_height, - opcode_costs, - storage_costs, - host_function_costs, - messages_limits, - } - } - } -} diff --git a/casper_types_ver_2_0/src/checksummed_hex.rs b/casper_types_ver_2_0/src/checksummed_hex.rs deleted file mode 100644 index 2b7aa193..00000000 --- a/casper_types_ver_2_0/src/checksummed_hex.rs +++ /dev/null @@ -1,241 +0,0 @@ -//! Checksummed hex encoding following an [EIP-55][1]-like scheme. -//! -//! [1]: https://eips.ethereum.org/EIPS/eip-55 - -use alloc::vec::Vec; -use core::ops::RangeInclusive; - -use base16; - -use crate::crypto; - -/// The number of input bytes, at or below which [`decode`] will checksum-decode the output. 
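The WasmConfig removed above ties the memory and stack limits to the opcode, storage, host-function and message cost tables. A minimal sketch of its defaults (not part of the original diff; `casper_types` import path assumed):

use casper_types::WasmConfig;

fn main() {
    let wasm_config = WasmConfig::default();

    // 64 pages of 64 KiB Wasm memory and a 500-frame native stack limit.
    assert_eq!(wasm_config.max_memory, 64);
    assert_eq!(wasm_config.max_stack_height, 500);

    // The cost tables hang off the config via getters.
    let _opcode_costs = wasm_config.opcode_costs();
    let _storage_costs = wasm_config.storage_costs();
}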
-pub const SMALL_BYTES_COUNT: usize = 75; - -const HEX_CHARS: [char; 22] = [ - '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', - 'D', 'E', 'F', -]; - -/// Takes a slice of bytes and breaks it up into a vector of *nibbles* (ie, 4-bit values) -/// represented as `u8`s. -fn bytes_to_nibbles<'a, T: 'a + AsRef<[u8]>>(input: &'a T) -> impl Iterator + 'a { - input - .as_ref() - .iter() - .flat_map(move |byte| [4, 0].iter().map(move |offset| (byte >> offset) & 0x0f)) -} - -/// Takes a slice of bytes and outputs an infinite cyclic stream of bits for those bytes. -fn bytes_to_bits_cycle(bytes: Vec) -> impl Iterator { - bytes - .into_iter() - .cycle() - .flat_map(move |byte| (0..8usize).map(move |offset| ((byte >> offset) & 0x01) == 0x01)) -} - -/// Returns the bytes encoded as hexadecimal with mixed-case based checksums following a scheme -/// similar to [EIP-55](https://eips.ethereum.org/EIPS/eip-55). -/// -/// Key differences: -/// - Works on any length of data, not just 20-byte addresses -/// - Uses Blake2b hashes rather than Keccak -/// - Uses hash bits rather than nibbles -fn encode_iter<'a, T: 'a + AsRef<[u8]>>(input: &'a T) -> impl Iterator + 'a { - let nibbles = bytes_to_nibbles(input); - let mut hash_bits = bytes_to_bits_cycle(crypto::blake2b(input.as_ref()).to_vec()); - nibbles.map(move |mut nibble| { - // Base 16 numbers greater than 10 are represented by the ascii characters a through f. - if nibble >= 10 && hash_bits.next().unwrap_or(true) { - // We are using nibble to index HEX_CHARS, so adding 6 to nibble gives us the index - // of the uppercase character. HEX_CHARS[10] == 'a', HEX_CHARS[16] == 'A'. - nibble += 6; - } - HEX_CHARS[nibble as usize] - }) -} - -/// Returns true if all chars in a string are uppercase or lowercase. -/// Returns false if the string is mixed case or if there are no alphabetic chars. -fn string_is_same_case>(s: T) -> bool { - const LOWER_RANGE: RangeInclusive = b'a'..=b'f'; - const UPPER_RANGE: RangeInclusive = b'A'..=b'F'; - - let mut chars = s - .as_ref() - .iter() - .filter(|c| LOWER_RANGE.contains(c) || UPPER_RANGE.contains(c)); - - match chars.next() { - Some(first) => { - let is_upper = UPPER_RANGE.contains(first); - chars.all(|c| UPPER_RANGE.contains(c) == is_upper) - } - None => { - // String has no actual characters. - true - } - } -} - -/// Decodes a mixed-case hexadecimal string, verifying that it conforms to the checksum scheme -/// similar to scheme in [EIP-55][1]. -/// -/// Key differences: -/// - Works on any length of (decoded) data up to `SMALL_BYTES_COUNT`, not just 20-byte addresses -/// - Uses Blake2b hashes rather than Keccak -/// - Uses hash bits rather than nibbles -/// -/// For backward compatibility: if the hex string is all uppercase or all lowercase, the check is -/// skipped. -/// -/// [1]: https://eips.ethereum.org/EIPS/eip-55 -pub fn decode>(input: T) -> Result, base16::DecodeError> { - let bytes = base16::decode(input.as_ref())?; - - // If the string was not small or not mixed case, don't verify the checksum. 
- if bytes.len() > SMALL_BYTES_COUNT || string_is_same_case(input.as_ref()) { - return Ok(bytes); - } - - encode_iter(&bytes) - .zip(input.as_ref().iter()) - .enumerate() - .try_for_each(|(index, (expected_case_hex_char, &input_hex_char))| { - if expected_case_hex_char as u8 == input_hex_char { - Ok(()) - } else { - Err(base16::DecodeError::InvalidByte { - index, - byte: expected_case_hex_char as u8, - }) - } - })?; - Ok(bytes) -} - -#[cfg(test)] -mod tests { - use alloc::string::String; - - use proptest::{ - collection::vec, - prelude::{any, prop_assert, prop_assert_eq}, - }; - use proptest_attr_macro::proptest; - - use super::*; - - #[test] - fn should_decode_empty_input() { - let input = String::new(); - let actual = decode(input).unwrap(); - assert!(actual.is_empty()); - } - - #[test] - fn string_is_same_case_true_when_same_case() { - let input = "aaaaaaaaaaa"; - assert!(string_is_same_case(input)); - - let input = "AAAAAAAAAAA"; - assert!(string_is_same_case(input)); - } - - #[test] - fn string_is_same_case_false_when_mixed_case() { - let input = "aAaAaAaAaAa"; - assert!(!string_is_same_case(input)); - } - - #[test] - fn string_is_same_case_no_alphabetic_chars_in_string() { - let input = "424242424242"; - assert!(string_is_same_case(input)); - } - - #[test] - fn should_checksum_decode_only_if_small() { - let input = [255; SMALL_BYTES_COUNT]; - let small_encoded: String = encode_iter(&input).collect(); - assert_eq!(input.to_vec(), decode(&small_encoded).unwrap()); - - assert!(decode("A1a2").is_err()); - - let large_encoded = format!("A1{}", small_encoded); - assert!(decode(large_encoded).is_ok()); - } - - #[proptest] - fn hex_roundtrip(input: Vec) { - prop_assert_eq!( - input.clone(), - decode(encode_iter(&input).collect::()).expect("Failed to decode input.") - ); - } - - proptest::proptest! { - #[test] - fn should_fail_on_invalid_checksum(input in vec(any::(), 0..75)) { - let encoded: String = encode_iter(&input).collect(); - - // Swap the case of the first letter in the checksum hex-encoded value. - let mut expected_error = None; - let mutated: String = encoded - .char_indices() - .map(|(index, mut c)| { - if expected_error.is_some() || c.is_ascii_digit() { - return c; - } - expected_error = Some(base16::DecodeError::InvalidByte { - index, - byte: c as u8, - }); - if c.is_ascii_uppercase() { - c.make_ascii_lowercase(); - } else { - c.make_ascii_uppercase(); - } - c - }) - .collect(); - - // If the encoded form is now all the same case or digits, just return. - if string_is_same_case(&mutated) { - return Ok(()); - } - - // Assert we can still decode to original input using `base16::decode`. - prop_assert_eq!( - input, - base16::decode(&mutated).expect("Failed to decode input.") - ); - - // Assert decoding using `checksummed_hex::decode` returns the expected error. 
- prop_assert_eq!(expected_error.unwrap(), decode(&mutated).unwrap_err()) - } - } - - #[proptest] - fn hex_roundtrip_sanity(input: Vec) { - prop_assert!(decode(encode_iter(&input).collect::()).is_ok()) - } - - #[proptest] - fn is_same_case_uppercase(input: String) { - let input = input.to_uppercase(); - prop_assert!(string_is_same_case(input)); - } - - #[proptest] - fn is_same_case_lowercase(input: String) { - let input = input.to_lowercase(); - prop_assert!(string_is_same_case(input)); - } - - #[proptest] - fn is_not_same_case(input: String) { - let input = format!("aA{}", input); - prop_assert!(!string_is_same_case(input)); - } -} diff --git a/casper_types_ver_2_0/src/cl_type.rs b/casper_types_ver_2_0/src/cl_type.rs deleted file mode 100644 index 945d6267..00000000 --- a/casper_types_ver_2_0/src/cl_type.rs +++ /dev/null @@ -1,817 +0,0 @@ -use alloc::{ - boxed::Box, - collections::{BTreeMap, BTreeSet, VecDeque}, - string::String, - vec::Vec, -}; -use core::{ - fmt::{self, Display, Formatter}, - mem, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num_rational::Ratio; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Key, URef, U128, U256, U512, -}; - -// This must be less than 300 in order to avoid a stack overflow when deserializing. -pub(crate) const CL_TYPE_RECURSION_DEPTH: u8 = 50; - -const CL_TYPE_TAG_BOOL: u8 = 0; -const CL_TYPE_TAG_I32: u8 = 1; -const CL_TYPE_TAG_I64: u8 = 2; -const CL_TYPE_TAG_U8: u8 = 3; -const CL_TYPE_TAG_U32: u8 = 4; -const CL_TYPE_TAG_U64: u8 = 5; -const CL_TYPE_TAG_U128: u8 = 6; -const CL_TYPE_TAG_U256: u8 = 7; -const CL_TYPE_TAG_U512: u8 = 8; -const CL_TYPE_TAG_UNIT: u8 = 9; -const CL_TYPE_TAG_STRING: u8 = 10; -const CL_TYPE_TAG_KEY: u8 = 11; -const CL_TYPE_TAG_UREF: u8 = 12; -const CL_TYPE_TAG_OPTION: u8 = 13; -const CL_TYPE_TAG_LIST: u8 = 14; -const CL_TYPE_TAG_BYTE_ARRAY: u8 = 15; -const CL_TYPE_TAG_RESULT: u8 = 16; -const CL_TYPE_TAG_MAP: u8 = 17; -const CL_TYPE_TAG_TUPLE1: u8 = 18; -const CL_TYPE_TAG_TUPLE2: u8 = 19; -const CL_TYPE_TAG_TUPLE3: u8 = 20; -const CL_TYPE_TAG_ANY: u8 = 21; -const CL_TYPE_TAG_PUBLIC_KEY: u8 = 22; - -/// Casper types, i.e. types which can be stored and manipulated by smart contracts. -/// -/// Provides a description of the underlying data type of a [`CLValue`](crate::CLValue). -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum CLType { - /// `bool` primitive. - Bool, - /// `i32` primitive. - I32, - /// `i64` primitive. - I64, - /// `u8` primitive. - U8, - /// `u32` primitive. - U32, - /// `u64` primitive. - U64, - /// [`U128`] large unsigned integer type. - U128, - /// [`U256`] large unsigned integer type. - U256, - /// [`U512`] large unsigned integer type. - U512, - /// `()` primitive. - Unit, - /// `String` primitive. - String, - /// [`Key`] system type. - Key, - /// [`URef`] system type. - URef, - /// [`PublicKey`](crate::PublicKey) system type. - PublicKey, - /// `Option` of a `CLType`. - #[cfg_attr(feature = "datasize", data_size(skip))] - Option(Box), - /// Variable-length list of a single `CLType` (comparable to a `Vec`). - #[cfg_attr(feature = "datasize", data_size(skip))] - List(Box), - /// Fixed-length list of a single `CLType` (comparable to a Rust array). 
- ByteArray(u32), - /// `Result` with `Ok` and `Err` variants of `CLType`s. - #[allow(missing_docs)] // generated docs are explicit enough. - #[cfg_attr(feature = "datasize", data_size(skip))] - Result { ok: Box, err: Box }, - /// Map with keys of a single `CLType` and values of a single `CLType`. - #[allow(missing_docs)] // generated docs are explicit enough. - #[cfg_attr(feature = "datasize", data_size(skip))] - Map { - key: Box, - value: Box, - }, - /// 1-ary tuple of a `CLType`. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple1([Box; 1]), - /// 2-ary tuple of `CLType`s. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple2([Box; 2]), - /// 3-ary tuple of `CLType`s. - #[cfg_attr(feature = "datasize", data_size(skip))] - Tuple3([Box; 3]), - /// Unspecified type. - Any, -} - -impl CLType { - /// The `len()` of the `Vec` resulting from `self.to_bytes()`. - pub fn serialized_length(&self) -> usize { - mem::size_of::() - + match self { - CLType::Bool - | CLType::I32 - | CLType::I64 - | CLType::U8 - | CLType::U32 - | CLType::U64 - | CLType::U128 - | CLType::U256 - | CLType::U512 - | CLType::Unit - | CLType::String - | CLType::Key - | CLType::URef - | CLType::PublicKey - | CLType::Any => 0, - CLType::Option(cl_type) | CLType::List(cl_type) => cl_type.serialized_length(), - CLType::ByteArray(list_len) => list_len.serialized_length(), - CLType::Result { ok, err } => ok.serialized_length() + err.serialized_length(), - CLType::Map { key, value } => key.serialized_length() + value.serialized_length(), - CLType::Tuple1(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - CLType::Tuple2(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - CLType::Tuple3(cl_type_array) => serialized_length_of_cl_tuple_type(cl_type_array), - } - } - - /// Returns `true` if the [`CLType`] is [`Option`]. - pub fn is_option(&self) -> bool { - matches!(self, Self::Option(..)) - } - - /// Creates a `CLType::Map`. - pub fn map(key: CLType, value: CLType) -> Self { - CLType::Map { - key: Box::new(key), - value: Box::new(value), - } - } -} - -/// Returns the `CLType` describing a "named key" on the system, i.e. a `(String, Key)`. 
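Returning to the checksummed_hex::decode function removed above: same-case input skips the checksum for backward compatibility, while mixed-case input must match the Blake2b-derived casing. A minimal sketch (not part of the original diff; assumes the module is publicly re-exported, as in the published casper-types crate):

use casper_types::checksummed_hex;

fn main() {
    // All-lowercase (or all-uppercase) hex decodes without a checksum check.
    let bytes = checksummed_hex::decode("0102ff").expect("lowercase hex decodes");
    assert_eq!(bytes, vec![0x01, 0x02, 0xff]);

    // Arbitrary mixed-case input fails the checksum, as in the unit test above.
    assert!(checksummed_hex::decode("A1a2").is_err());
}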
-pub fn named_key_type() -> CLType { - CLType::Tuple2([Box::new(CLType::String), Box::new(CLType::Key)]) -} - -impl CLType { - pub(crate) fn append_bytes(&self, stream: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - CLType::Bool => stream.push(CL_TYPE_TAG_BOOL), - CLType::I32 => stream.push(CL_TYPE_TAG_I32), - CLType::I64 => stream.push(CL_TYPE_TAG_I64), - CLType::U8 => stream.push(CL_TYPE_TAG_U8), - CLType::U32 => stream.push(CL_TYPE_TAG_U32), - CLType::U64 => stream.push(CL_TYPE_TAG_U64), - CLType::U128 => stream.push(CL_TYPE_TAG_U128), - CLType::U256 => stream.push(CL_TYPE_TAG_U256), - CLType::U512 => stream.push(CL_TYPE_TAG_U512), - CLType::Unit => stream.push(CL_TYPE_TAG_UNIT), - CLType::String => stream.push(CL_TYPE_TAG_STRING), - CLType::Key => stream.push(CL_TYPE_TAG_KEY), - CLType::URef => stream.push(CL_TYPE_TAG_UREF), - CLType::PublicKey => stream.push(CL_TYPE_TAG_PUBLIC_KEY), - CLType::Option(cl_type) => { - stream.push(CL_TYPE_TAG_OPTION); - cl_type.append_bytes(stream)?; - } - CLType::List(cl_type) => { - stream.push(CL_TYPE_TAG_LIST); - cl_type.append_bytes(stream)?; - } - CLType::ByteArray(len) => { - stream.push(CL_TYPE_TAG_BYTE_ARRAY); - stream.append(&mut len.to_bytes()?); - } - CLType::Result { ok, err } => { - stream.push(CL_TYPE_TAG_RESULT); - ok.append_bytes(stream)?; - err.append_bytes(stream)?; - } - CLType::Map { key, value } => { - stream.push(CL_TYPE_TAG_MAP); - key.append_bytes(stream)?; - value.append_bytes(stream)?; - } - CLType::Tuple1(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE1, cl_type_array, stream)? - } - CLType::Tuple2(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE2, cl_type_array, stream)? - } - CLType::Tuple3(cl_type_array) => { - serialize_cl_tuple_type(CL_TYPE_TAG_TUPLE3, cl_type_array, stream)? 
- } - CLType::Any => stream.push(CL_TYPE_TAG_ANY), - } - Ok(()) - } -} - -impl Display for CLType { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - match self { - CLType::Bool => write!(formatter, "bool"), - CLType::I32 => write!(formatter, "i32"), - CLType::I64 => write!(formatter, "i64"), - CLType::U8 => write!(formatter, "u8"), - CLType::U32 => write!(formatter, "u32"), - CLType::U64 => write!(formatter, "u64"), - CLType::U128 => write!(formatter, "u128"), - CLType::U256 => write!(formatter, "u256"), - CLType::U512 => write!(formatter, "u512"), - CLType::Unit => write!(formatter, "unit"), - CLType::String => write!(formatter, "string"), - CLType::Key => write!(formatter, "key"), - CLType::URef => write!(formatter, "uref"), - CLType::PublicKey => write!(formatter, "public-key"), - CLType::Option(t) => write!(formatter, "option<{t}>"), - CLType::List(t) => write!(formatter, "list<{t}>"), - CLType::ByteArray(len) => write!(formatter, "byte-array[{len}]"), - CLType::Result { ok, err } => write!(formatter, "result<{ok}, {err}>"), - CLType::Map { key, value } => write!(formatter, "map<{key}, {value}>"), - CLType::Tuple1([t1]) => write!(formatter, "({t1},)"), - CLType::Tuple2([t1, t2]) => write!(formatter, "({t1}, {t2})"), - CLType::Tuple3([t1, t2, t3]) => write!(formatter, "({t1}, {t2}, {t3})"), - CLType::Any => write!(formatter, "any"), - } - } -} - -impl FromBytes for CLType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - depth_limited_from_bytes(0, bytes) - } -} - -fn depth_limited_from_bytes(depth: u8, bytes: &[u8]) -> Result<(CLType, &[u8]), bytesrepr::Error> { - if depth >= CL_TYPE_RECURSION_DEPTH { - return Err(bytesrepr::Error::ExceededRecursionDepth); - } - let depth = depth + 1; - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - CL_TYPE_TAG_BOOL => Ok((CLType::Bool, remainder)), - CL_TYPE_TAG_I32 => Ok((CLType::I32, remainder)), - CL_TYPE_TAG_I64 => Ok((CLType::I64, remainder)), - CL_TYPE_TAG_U8 => Ok((CLType::U8, remainder)), - CL_TYPE_TAG_U32 => Ok((CLType::U32, remainder)), - CL_TYPE_TAG_U64 => Ok((CLType::U64, remainder)), - CL_TYPE_TAG_U128 => Ok((CLType::U128, remainder)), - CL_TYPE_TAG_U256 => Ok((CLType::U256, remainder)), - CL_TYPE_TAG_U512 => Ok((CLType::U512, remainder)), - CL_TYPE_TAG_UNIT => Ok((CLType::Unit, remainder)), - CL_TYPE_TAG_STRING => Ok((CLType::String, remainder)), - CL_TYPE_TAG_KEY => Ok((CLType::Key, remainder)), - CL_TYPE_TAG_UREF => Ok((CLType::URef, remainder)), - CL_TYPE_TAG_PUBLIC_KEY => Ok((CLType::PublicKey, remainder)), - CL_TYPE_TAG_OPTION => { - let (inner_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::Option(Box::new(inner_type)); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_LIST => { - let (inner_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::List(Box::new(inner_type)); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_BYTE_ARRAY => { - let (len, remainder) = u32::from_bytes(remainder)?; - let cl_type = CLType::ByteArray(len); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_RESULT => { - let (ok_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let (err_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let cl_type = CLType::Result { - ok: Box::new(ok_type), - err: Box::new(err_type), - }; - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_MAP => { - let (key_type, remainder) = depth_limited_from_bytes(depth, remainder)?; - let (value_type, remainder) = depth_limited_from_bytes(depth, 
remainder)?; - let cl_type = CLType::Map { - key: Box::new(key_type), - value: Box::new(value_type), - }; - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE1 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 1, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 1 - // element - let cl_type = CLType::Tuple1([inner_types.pop_front().unwrap()]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE2 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 2, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 2 - // elements - let cl_type = CLType::Tuple2([ - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - ]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_TUPLE3 => { - let (mut inner_types, remainder) = parse_cl_tuple_types(depth, 3, remainder)?; - // NOTE: Assumed safe as `parse_cl_tuple_types` is expected to have exactly 3 - // elements - let cl_type = CLType::Tuple3([ - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - inner_types.pop_front().unwrap(), - ]); - Ok((cl_type, remainder)) - } - CL_TYPE_TAG_ANY => Ok((CLType::Any, remainder)), - _ => Err(bytesrepr::Error::Formatting), - } -} - -fn serialize_cl_tuple_type<'a, T: IntoIterator>>( - tag: u8, - cl_type_array: T, - stream: &mut Vec, -) -> Result<(), bytesrepr::Error> { - stream.push(tag); - for cl_type in cl_type_array { - cl_type.append_bytes(stream)?; - } - Ok(()) -} - -fn parse_cl_tuple_types( - depth: u8, - count: usize, - mut bytes: &[u8], -) -> Result<(VecDeque>, &[u8]), bytesrepr::Error> { - let mut cl_types = VecDeque::with_capacity(count); - for _ in 0..count { - let (cl_type, remainder) = depth_limited_from_bytes(depth, bytes)?; - cl_types.push_back(Box::new(cl_type)); - bytes = remainder; - } - - Ok((cl_types, bytes)) -} - -fn serialized_length_of_cl_tuple_type<'a, T: IntoIterator>>( - cl_type_array: T, -) -> usize { - cl_type_array - .into_iter() - .map(|cl_type| cl_type.serialized_length()) - .sum() -} - -/// A type which can be described as a [`CLType`]. -pub trait CLTyped { - /// The `CLType` of `Self`. 
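For the CLType enum removed above, nested types are built from boxed inner types and rendered by the Display impl in lowercase angle-bracket form. A minimal sketch (not part of the original diff; `casper_types` import path assumed):

use casper_types::CLType;

fn main() {
    // A map from string keys to u64 values, via the `map` convenience constructor.
    let cl_type = CLType::map(CLType::String, CLType::U64);

    assert_eq!(cl_type.to_string(), "map<string, u64>");

    // One tag byte for Map plus one tag byte for each inner type.
    assert_eq!(cl_type.serialized_length(), 3);

    assert!(!cl_type.is_option());
}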
- fn cl_type() -> CLType; -} - -impl CLTyped for bool { - fn cl_type() -> CLType { - CLType::Bool - } -} - -impl CLTyped for i32 { - fn cl_type() -> CLType { - CLType::I32 - } -} - -impl CLTyped for i64 { - fn cl_type() -> CLType { - CLType::I64 - } -} - -impl CLTyped for u8 { - fn cl_type() -> CLType { - CLType::U8 - } -} - -impl CLTyped for u32 { - fn cl_type() -> CLType { - CLType::U32 - } -} - -impl CLTyped for u64 { - fn cl_type() -> CLType { - CLType::U64 - } -} - -impl CLTyped for U128 { - fn cl_type() -> CLType { - CLType::U128 - } -} - -impl CLTyped for U256 { - fn cl_type() -> CLType { - CLType::U256 - } -} - -impl CLTyped for U512 { - fn cl_type() -> CLType { - CLType::U512 - } -} - -impl CLTyped for () { - fn cl_type() -> CLType { - CLType::Unit - } -} - -impl CLTyped for String { - fn cl_type() -> CLType { - CLType::String - } -} - -impl CLTyped for &str { - fn cl_type() -> CLType { - CLType::String - } -} - -impl CLTyped for Key { - fn cl_type() -> CLType { - CLType::Key - } -} - -impl CLTyped for URef { - fn cl_type() -> CLType { - CLType::URef - } -} - -impl CLTyped for Option { - fn cl_type() -> CLType { - CLType::Option(Box::new(T::cl_type())) - } -} - -impl CLTyped for Vec { - fn cl_type() -> CLType { - CLType::List(Box::new(T::cl_type())) - } -} - -impl CLTyped for BTreeSet { - fn cl_type() -> CLType { - CLType::List(Box::new(T::cl_type())) - } -} - -impl CLTyped for &T { - fn cl_type() -> CLType { - T::cl_type() - } -} - -impl CLTyped for [u8; COUNT] { - fn cl_type() -> CLType { - CLType::ByteArray(COUNT as u32) - } -} - -impl CLTyped for Result { - fn cl_type() -> CLType { - let ok = Box::new(T::cl_type()); - let err = Box::new(E::cl_type()); - CLType::Result { ok, err } - } -} - -impl CLTyped for BTreeMap { - fn cl_type() -> CLType { - let key = Box::new(K::cl_type()); - let value = Box::new(V::cl_type()); - CLType::Map { key, value } - } -} - -impl CLTyped for (T1,) { - fn cl_type() -> CLType { - CLType::Tuple1([Box::new(T1::cl_type())]) - } -} - -impl CLTyped for (T1, T2) { - fn cl_type() -> CLType { - CLType::Tuple2([Box::new(T1::cl_type()), Box::new(T2::cl_type())]) - } -} - -impl CLTyped for (T1, T2, T3) { - fn cl_type() -> CLType { - CLType::Tuple3([ - Box::new(T1::cl_type()), - Box::new(T2::cl_type()), - Box::new(T3::cl_type()), - ]) - } -} - -impl CLTyped for Ratio { - fn cl_type() -> CLType { - <(T, T)>::cl_type() - } -} - -#[cfg(test)] -mod tests { - use std::{fmt::Debug, iter, string::ToString}; - - use super::*; - use crate::{ - bytesrepr::{FromBytes, ToBytes}, - AccessRights, CLValue, - }; - - fn round_trip(value: &T) { - let cl_value = CLValue::from_t(value.clone()).unwrap(); - - let serialized_cl_value = cl_value.to_bytes().unwrap(); - assert_eq!(serialized_cl_value.len(), cl_value.serialized_length()); - let parsed_cl_value: CLValue = bytesrepr::deserialize(serialized_cl_value).unwrap(); - assert_eq!(cl_value, parsed_cl_value); - - let parsed_value = CLValue::into_t(cl_value).unwrap(); - assert_eq!(*value, parsed_value); - } - - #[test] - fn bool_should_work() { - round_trip(&true); - round_trip(&false); - } - - #[test] - fn u8_should_work() { - round_trip(&1u8); - } - - #[test] - fn u32_should_work() { - round_trip(&1u32); - } - - #[test] - fn i32_should_work() { - round_trip(&-1i32); - } - - #[test] - fn u64_should_work() { - round_trip(&1u64); - } - - #[test] - fn i64_should_work() { - round_trip(&-1i64); - } - - #[test] - fn u128_should_work() { - round_trip(&U128::one()); - } - - #[test] - fn u256_should_work() { - round_trip(&U256::one()); - } - 
- #[test] - fn u512_should_work() { - round_trip(&U512::one()); - } - - #[test] - fn unit_should_work() { - round_trip(&()); - } - - #[test] - fn string_should_work() { - round_trip(&String::from("abc")); - } - - #[test] - fn key_should_work() { - let key = Key::URef(URef::new([0u8; 32], AccessRights::READ_ADD_WRITE)); - round_trip(&key); - } - - #[test] - fn uref_should_work() { - let uref = URef::new([0u8; 32], AccessRights::READ_ADD_WRITE); - round_trip(&uref); - } - - #[test] - fn option_of_cl_type_should_work() { - let x: Option = Some(-1); - let y: Option = None; - - round_trip(&x); - round_trip(&y); - } - - #[test] - fn vec_of_cl_type_should_work() { - let vec = vec![String::from("a"), String::from("b")]; - round_trip(&vec); - } - - #[test] - #[allow(clippy::cognitive_complexity)] - fn small_array_of_u8_should_work() { - macro_rules! test_small_array { - ($($N:literal)+) => { - $( - let mut array: [u8; $N] = Default::default(); - for i in 0..$N { - array[i] = i as u8; - } - round_trip(&array); - )+ - } - } - - test_small_array! { - 1 2 3 4 5 6 7 8 9 - 10 11 12 13 14 15 16 17 18 19 - 20 21 22 23 24 25 26 27 28 29 - 30 31 32 - } - } - - #[test] - fn large_array_of_cl_type_should_work() { - macro_rules! test_large_array { - ($($N:literal)+) => { - $( - let array = { - let mut tmp = [0u8; $N]; - for i in 0..$N { - tmp[i] = i as u8; - } - tmp - }; - - let cl_value = CLValue::from_t(array.clone()).unwrap(); - - let serialized_cl_value = cl_value.to_bytes().unwrap(); - let parsed_cl_value: CLValue = bytesrepr::deserialize(serialized_cl_value).unwrap(); - assert_eq!(cl_value, parsed_cl_value); - - let parsed_value: [u8; $N] = CLValue::into_t(cl_value).unwrap(); - for i in 0..$N { - assert_eq!(array[i], parsed_value[i]); - } - )+ - } - } - - test_large_array! 
{ 64 128 256 512 } - } - - #[test] - fn result_of_cl_type_should_work() { - let x: Result<(), String> = Ok(()); - let y: Result<(), String> = Err(String::from("Hello, world!")); - - round_trip(&x); - round_trip(&y); - } - - #[test] - fn map_of_cl_type_should_work() { - let mut map: BTreeMap = BTreeMap::new(); - map.insert(String::from("abc"), 1); - map.insert(String::from("xyz"), 2); - - round_trip(&map); - } - - #[test] - fn tuple_1_should_work() { - let x = (-1i32,); - - round_trip(&x); - } - - #[test] - fn tuple_2_should_work() { - let x = (-1i32, String::from("a")); - - round_trip(&x); - } - - #[test] - fn tuple_3_should_work() { - let x = (-1i32, 1u32, String::from("a")); - - round_trip(&x); - } - - #[test] - fn parsing_nested_tuple_1_cltype_should_not_stack_overflow() { - // The bytesrepr representation of the CLType for a - // nested (((...((),),...),),) looks like: - // [18, 18, 18, ..., 9] - - for i in 1..1000 { - let bytes = iter::repeat(CL_TYPE_TAG_TUPLE1) - .take(i) - .chain(iter::once(CL_TYPE_TAG_UNIT)) - .collect(); - match bytesrepr::deserialize(bytes) { - Ok(parsed_cltype) => assert!(matches!(parsed_cltype, CLType::Tuple1(_))), - Err(error) => assert_eq!(error, bytesrepr::Error::ExceededRecursionDepth), - } - } - } - - #[test] - fn parsing_nested_tuple_1_value_should_not_stack_overflow() { - // The bytesrepr representation of the CLValue for a - // nested (((...((),),...),),) looks like: - // [0, 0, 0, 0, 18, 18, 18, ..., 18, 9] - - for i in 1..1000 { - let bytes = iter::repeat(0) - .take(4) - .chain(iter::repeat(CL_TYPE_TAG_TUPLE1).take(i)) - .chain(iter::once(CL_TYPE_TAG_UNIT)) - .collect(); - match bytesrepr::deserialize::(bytes) { - Ok(parsed_clvalue) => { - assert!(matches!(parsed_clvalue.cl_type(), CLType::Tuple1(_))) - } - Err(error) => assert_eq!(error, bytesrepr::Error::ExceededRecursionDepth), - } - } - } - - #[test] - fn any_should_work() { - #[derive(PartialEq, Debug, Clone)] - struct Any(String); - - impl CLTyped for Any { - fn cl_type() -> CLType { - CLType::Any - } - } - - impl ToBytes for Any { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - } - - impl FromBytes for Any { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (inner, remainder) = String::from_bytes(bytes)?; - Ok((Any(inner), remainder)) - } - } - - let any = Any("Any test".to_string()); - round_trip(&any); - } - - #[test] - fn should_have_cltype_of_ref_to_cltyped() { - assert_eq!(>::cl_type(), >::cl_type()) - } -} diff --git a/casper_types_ver_2_0/src/cl_value.rs b/casper_types_ver_2_0/src/cl_value.rs deleted file mode 100644 index 7e6732d1..00000000 --- a/casper_types_ver_2_0/src/cl_value.rs +++ /dev/null @@ -1,1208 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; -use serde_json::Value; - -use crate::{ - bytesrepr::{self, Bytes, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}, - checksummed_hex, CLType, CLTyped, -}; - -mod jsonrepr; - -/// Error while converting a [`CLValue`] into a given type. 
-#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct CLTypeMismatch { - /// The [`CLType`] into which the `CLValue` was being converted. - pub expected: CLType, - /// The actual underlying [`CLType`] of this `CLValue`, i.e. the type from which it was - /// constructed. - pub found: CLType, -} - -impl Display for CLTypeMismatch { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!( - f, - "Expected {:?} but found {:?}.", - self.expected, self.found - ) - } -} - -/// Error relating to [`CLValue`] operations. -#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum CLValueError { - /// An error while serializing or deserializing the underlying data. - Serialization(bytesrepr::Error), - /// A type mismatch while trying to convert a [`CLValue`] into a given type. - Type(CLTypeMismatch), -} - -impl From for CLValueError { - fn from(error: bytesrepr::Error) -> Self { - CLValueError::Serialization(error) - } -} - -impl Display for CLValueError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - CLValueError::Serialization(error) => write!(formatter, "CLValue error: {}", error), - CLValueError::Type(error) => write!(formatter, "Type mismatch: {}", error), - } - } -} - -/// A Casper value, i.e. a value which can be stored and manipulated by smart contracts. -/// -/// It holds the underlying data as a type-erased, serialized `Vec` and also holds the -/// [`CLType`] of the underlying data as a separate member. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct CLValue { - cl_type: CLType, - bytes: Bytes, -} - -impl CLValue { - /// Constructs a `CLValue` from `t`. - pub fn from_t(t: T) -> Result { - let bytes = t.into_bytes()?; - - Ok(CLValue { - cl_type: T::cl_type(), - bytes: bytes.into(), - }) - } - - /// Converts `self` into its underlying type. - pub fn to_t(&self) -> Result { - let expected = T::cl_type(); - - if self.cl_type == expected { - Ok(bytesrepr::deserialize_from_slice(&self.bytes)?) - } else { - Err(CLValueError::Type(CLTypeMismatch { - expected, - found: self.cl_type.clone(), - })) - } - } - - /// Consumes and converts `self` back into its underlying type. - pub fn into_t(self) -> Result { - let expected = T::cl_type(); - - if self.cl_type == expected { - Ok(bytesrepr::deserialize_from_slice(&self.bytes)?) - } else { - Err(CLValueError::Type(CLTypeMismatch { - expected, - found: self.cl_type, - })) - } - } - - /// A convenience method to create CLValue for a unit. - pub fn unit() -> Self { - CLValue::from_components(CLType::Unit, Vec::new()) - } - - // This is only required in order to implement `TryFrom for CLValue` (i.e. the - // conversion from the Protobuf `CLValue`) in a separate module to this one. - #[doc(hidden)] - pub fn from_components(cl_type: CLType, bytes: Vec) -> Self { - Self { - cl_type, - bytes: bytes.into(), - } - } - - // This is only required in order to implement `From for state::CLValue` (i.e. the - // conversion to the Protobuf `CLValue`) in a separate module to this one. - #[doc(hidden)] - pub fn destructure(self) -> (CLType, Bytes) { - (self.cl_type, self.bytes) - } - - /// The [`CLType`] of the underlying data. - pub fn cl_type(&self) -> &CLType { - &self.cl_type - } - - /// Returns a reference to the serialized form of the underlying value held in this `CLValue`. 
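The CLValue type removed above pairs a CLType with the bytesrepr-serialized bytes, and from_t / to_t / into_t enforce that the recorded type matches on the way back out. A minimal sketch (not part of the original diff; `casper_types` import path assumed):

use casper_types::{CLType, CLValue};

fn main() {
    // Wrap a plain Rust value; its CLType is recorded next to the serialized bytes.
    let cl_value = CLValue::from_t(42u64).expect("from_t");
    assert_eq!(cl_value.cl_type(), &CLType::U64);

    // Converting to the wrong type is rejected with a CLTypeMismatch error...
    assert!(cl_value.to_t::<String>().is_err());

    // ...while the recorded type round-trips.
    let value: u64 = cl_value.into_t().expect("into_t");
    assert_eq!(value, 42);
}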
- pub fn inner_bytes(&self) -> &Vec { - self.bytes.inner_bytes() - } - - /// Returns the length of the `Vec` yielded after calling `self.to_bytes()`. - /// - /// Note, this method doesn't actually serialize `self`, and hence is relatively cheap. - pub fn serialized_length(&self) -> usize { - self.cl_type.serialized_length() + U32_SERIALIZED_LENGTH + self.bytes.len() - } -} - -impl ToBytes for CLValue { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.clone().into_bytes() - } - - fn into_bytes(self) -> Result, bytesrepr::Error> { - let mut result = self.bytes.into_bytes()?; - self.cl_type.append_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.bytes.serialized_length() + self.cl_type.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.bytes.write_bytes(writer)?; - self.cl_type.append_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for CLValue { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, remainder) = FromBytes::from_bytes(bytes)?; - let (cl_type, remainder) = FromBytes::from_bytes(remainder)?; - let cl_value = CLValue { cl_type, bytes }; - Ok((cl_value, remainder)) - } -} - -/// We need to implement `JsonSchema` for `CLValue` as though it is a `CLValueJson`. -#[cfg(feature = "json-schema")] -impl JsonSchema for CLValue { - fn schema_name() -> String { - "CLValue".to_string() - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - ::json_schema(gen) - } -} - -/// A Casper value, i.e. a value which can be stored and manipulated by smart contracts. -/// -/// It holds the underlying data as a type-erased, serialized `Vec` and also holds the CLType of -/// the underlying data as a separate member. -/// -/// The `parsed` field, representing the original value, is a convenience only available when a -/// CLValue is encoded to JSON, and can always be set to null if preferred. -#[derive(Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "CLValue"))] -struct CLValueJson { - cl_type: CLType, - bytes: String, - parsed: Option, -} - -impl Serialize for CLValue { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - CLValueJson { - cl_type: self.cl_type.clone(), - bytes: base16::encode_lower(&self.bytes), - parsed: jsonrepr::cl_value_to_json(self), - } - .serialize(serializer) - } else { - (&self.cl_type, &self.bytes).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for CLValue { - fn deserialize>(deserializer: D) -> Result { - let (cl_type, bytes) = if deserializer.is_human_readable() { - let json = CLValueJson::deserialize(deserializer)?; - ( - json.cl_type.clone(), - checksummed_hex::decode(&json.bytes).map_err(D::Error::custom)?, - ) - } else { - <(CLType, Vec)>::deserialize(deserializer)? 
- }; - Ok(CLValue { - cl_type, - bytes: bytes.into(), - }) - } -} - -#[cfg(test)] -mod tests { - use alloc::string::ToString; - - #[cfg(feature = "json-schema")] - use schemars::schema_for; - - use super::*; - use crate::{ - account::{AccountHash, ACCOUNT_HASH_LENGTH}, - key::KEY_HASH_LENGTH, - AccessRights, DeployHash, Digest, Key, PublicKey, TransferAddr, URef, TRANSFER_ADDR_LENGTH, - U128, U256, U512, UREF_ADDR_LENGTH, - }; - - #[cfg(feature = "json-schema")] - #[test] - fn json_schema() { - let json_clvalue_schema = schema_for!(CLValueJson); - let clvalue_schema = schema_for!(CLValue); - assert_eq!(json_clvalue_schema, clvalue_schema); - } - - #[test] - fn serde_roundtrip() { - let cl_value = CLValue::from_t(true).unwrap(); - let serialized = bincode::serialize(&cl_value).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(cl_value, decoded); - } - - #[test] - fn json_roundtrip() { - let cl_value = CLValue::from_t(true).unwrap(); - let json_string = serde_json::to_string_pretty(&cl_value).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(cl_value, decoded); - } - - fn check_to_json(value: T, expected: &str) { - let cl_value = CLValue::from_t(value).unwrap(); - let cl_value_as_json = serde_json::to_string(&cl_value).unwrap(); - // Remove the `serialized_bytes` field: - // Split the string at `,"serialized_bytes":`. - let pattern = r#","bytes":""#; - let start_index = cl_value_as_json.find(pattern).unwrap(); - let (start, end) = cl_value_as_json.split_at(start_index); - // Find the end of the value of the `bytes` field, and split there. - let mut json_without_serialize_bytes = start.to_string(); - for (index, char) in end.char_indices().skip(pattern.len()) { - if char == '"' { - let (_to_remove, to_keep) = end.split_at(index + 1); - json_without_serialize_bytes.push_str(to_keep); - break; - } - } - assert_eq!(json_without_serialize_bytes, expected); - } - - mod simple_types { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json(true, r#"{"cl_type":"Bool","parsed":true}"#); - check_to_json(false, r#"{"cl_type":"Bool","parsed":false}"#); - } - - #[test] - fn i32_cl_value_should_encode_to_json() { - check_to_json( - i32::min_value(), - r#"{"cl_type":"I32","parsed":-2147483648}"#, - ); - check_to_json(0_i32, r#"{"cl_type":"I32","parsed":0}"#); - check_to_json(i32::max_value(), r#"{"cl_type":"I32","parsed":2147483647}"#); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - i64::min_value(), - r#"{"cl_type":"I64","parsed":-9223372036854775808}"#, - ); - check_to_json(0_i64, r#"{"cl_type":"I64","parsed":0}"#); - check_to_json( - i64::max_value(), - r#"{"cl_type":"I64","parsed":9223372036854775807}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json(0_u8, r#"{"cl_type":"U8","parsed":0}"#); - check_to_json(u8::max_value(), r#"{"cl_type":"U8","parsed":255}"#); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json(0_u32, r#"{"cl_type":"U32","parsed":0}"#); - check_to_json(u32::max_value(), r#"{"cl_type":"U32","parsed":4294967295}"#); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json(0_u64, r#"{"cl_type":"U64","parsed":0}"#); - check_to_json( - u64::max_value(), - r#"{"cl_type":"U64","parsed":18446744073709551615}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json(U128::zero(), r#"{"cl_type":"U128","parsed":"0"}"#); - 
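// Illustrative sketch, not part of the deleted sources: the human-readable JSON shape produced by
// the Serialize impl above (serde_json assumed available, as in the dev-dependencies). The tests
// in this module compare against this shape with the hex "bytes" field stripped by check_to_json.
use casper_types::CLValue;

fn clvalue_json_shape() {
    let cl_value = CLValue::from_t(true).expect("from_t");
    let json = serde_json::to_value(&cl_value).expect("to JSON");
    assert_eq!(json["cl_type"], "Bool");
    // `true` serializes to the single byte 0x01, hex-encoded into the "bytes" field.
    assert_eq!(json["bytes"], "01");
    assert_eq!(json["parsed"], true);
}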
check_to_json( - U128::max_value(), - r#"{"cl_type":"U128","parsed":"340282366920938463463374607431768211455"}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json(U256::zero(), r#"{"cl_type":"U256","parsed":"0"}"#); - check_to_json( - U256::max_value(), - r#"{"cl_type":"U256","parsed":"115792089237316195423570985008687907853269984665640564039457584007913129639935"}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json(U512::zero(), r#"{"cl_type":"U512","parsed":"0"}"#); - check_to_json( - U512::max_value(), - r#"{"cl_type":"U512","parsed":"13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084095"}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json((), r#"{"cl_type":"Unit","parsed":null}"#); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - check_to_json(String::new(), r#"{"cl_type":"String","parsed":""}"#); - check_to_json( - "test string".to_string(), - r#"{"cl_type":"String","parsed":"test string"}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key_account = Key::Account(AccountHash::new([1; ACCOUNT_HASH_LENGTH])); - check_to_json( - key_account, - r#"{"cl_type":"Key","parsed":"account-hash-0101010101010101010101010101010101010101010101010101010101010101"}"#, - ); - - let key_hash = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - key_hash, - r#"{"cl_type":"Key","parsed":"hash-0202020202020202020202020202020202020202020202020202020202020202"}"#, - ); - - let key_uref = Key::URef(URef::new([3; UREF_ADDR_LENGTH], AccessRights::READ)); - check_to_json( - key_uref, - r#"{"cl_type":"Key","parsed":"uref-0303030303030303030303030303030303030303030303030303030303030303-001"}"#, - ); - - let key_transfer = Key::Transfer(TransferAddr::new([4; TRANSFER_ADDR_LENGTH])); - check_to_json( - key_transfer, - r#"{"cl_type":"Key","parsed":"transfer-0404040404040404040404040404040404040404040404040404040404040404"}"#, - ); - - let key_deploy_info = Key::DeployInfo(DeployHash::from_raw([5; Digest::LENGTH])); - check_to_json( - key_deploy_info, - r#"{"cl_type":"Key","parsed":"deploy-0505050505050505050505050505050505050505050505050505050505050505"}"#, - ); - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - uref, - r#"{"cl_type":"URef","parsed":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}"#, - ); - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - check_to_json( - PublicKey::from( - &SecretKey::ed25519_from_bytes([7; SecretKey::ED25519_LENGTH]).unwrap(), - ), - r#"{"cl_type":"PublicKey","parsed":"01ea4a6c63e29c520abef5507b132ec5f9954776aebebe7b92421eea691446d22c"}"#, - ); - check_to_json( - PublicKey::from( - &SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(), - ), - r#"{"cl_type":"PublicKey","parsed":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}"#, - ); - } - } - - mod option { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json(Some(true), r#"{"cl_type":{"Option":"Bool"},"parsed":true}"#); - check_to_json( - Some(false), - r#"{"cl_type":{"Option":"Bool"},"parsed":false}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"Bool"},"parsed":null}"#, - ); - } - - #[test] - fn 
i32_cl_value_should_encode_to_json() { - check_to_json( - Some(i32::min_value()), - r#"{"cl_type":{"Option":"I32"},"parsed":-2147483648}"#, - ); - check_to_json(Some(0_i32), r#"{"cl_type":{"Option":"I32"},"parsed":0}"#); - check_to_json( - Some(i32::max_value()), - r#"{"cl_type":{"Option":"I32"},"parsed":2147483647}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"I32"},"parsed":null}"#, - ); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - Some(i64::min_value()), - r#"{"cl_type":{"Option":"I64"},"parsed":-9223372036854775808}"#, - ); - check_to_json(Some(0_i64), r#"{"cl_type":{"Option":"I64"},"parsed":0}"#); - check_to_json( - Some(i64::max_value()), - r#"{"cl_type":{"Option":"I64"},"parsed":9223372036854775807}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"I64"},"parsed":null}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json(Some(0_u8), r#"{"cl_type":{"Option":"U8"},"parsed":0}"#); - check_to_json( - Some(u8::max_value()), - r#"{"cl_type":{"Option":"U8"},"parsed":255}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U8"},"parsed":null}"#, - ); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json(Some(0_u32), r#"{"cl_type":{"Option":"U32"},"parsed":0}"#); - check_to_json( - Some(u32::max_value()), - r#"{"cl_type":{"Option":"U32"},"parsed":4294967295}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U32"},"parsed":null}"#, - ); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json(Some(0_u64), r#"{"cl_type":{"Option":"U64"},"parsed":0}"#); - check_to_json( - Some(u64::max_value()), - r#"{"cl_type":{"Option":"U64"},"parsed":18446744073709551615}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U64"},"parsed":null}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json( - Some(U128::zero()), - r#"{"cl_type":{"Option":"U128"},"parsed":"0"}"#, - ); - check_to_json( - Some(U128::max_value()), - r#"{"cl_type":{"Option":"U128"},"parsed":"340282366920938463463374607431768211455"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U128"},"parsed":null}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json( - Some(U256::zero()), - r#"{"cl_type":{"Option":"U256"},"parsed":"0"}"#, - ); - check_to_json( - Some(U256::max_value()), - r#"{"cl_type":{"Option":"U256"},"parsed":"115792089237316195423570985008687907853269984665640564039457584007913129639935"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U256"},"parsed":null}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json( - Some(U512::zero()), - r#"{"cl_type":{"Option":"U512"},"parsed":"0"}"#, - ); - check_to_json( - Some(U512::max_value()), - r#"{"cl_type":{"Option":"U512"},"parsed":"13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084095"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"U512"},"parsed":null}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json(Some(()), r#"{"cl_type":{"Option":"Unit"},"parsed":null}"#); - check_to_json( - Option::<()>::None, - r#"{"cl_type":{"Option":"Unit"},"parsed":null}"#, - ); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - check_to_json( - Some(String::new()), - 
r#"{"cl_type":{"Option":"String"},"parsed":""}"#, - ); - check_to_json( - Some("test string".to_string()), - r#"{"cl_type":{"Option":"String"},"parsed":"test string"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"String"},"parsed":null}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key_account = Key::Account(AccountHash::new([1; ACCOUNT_HASH_LENGTH])); - check_to_json( - Some(key_account), - r#"{"cl_type":{"Option":"Key"},"parsed":"account-hash-0101010101010101010101010101010101010101010101010101010101010101"}"#, - ); - - let key_hash = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - Some(key_hash), - r#"{"cl_type":{"Option":"Key"},"parsed":"hash-0202020202020202020202020202020202020202020202020202020202020202"}"#, - ); - - let key_uref = Key::URef(URef::new([3; UREF_ADDR_LENGTH], AccessRights::READ)); - check_to_json( - Some(key_uref), - r#"{"cl_type":{"Option":"Key"},"parsed":"uref-0303030303030303030303030303030303030303030303030303030303030303-001"}"#, - ); - - let key_transfer = Key::Transfer(TransferAddr::new([4; TRANSFER_ADDR_LENGTH])); - check_to_json( - Some(key_transfer), - r#"{"cl_type":{"Option":"Key"},"parsed":"transfer-0404040404040404040404040404040404040404040404040404040404040404"}"#, - ); - - let key_deploy_info = Key::DeployInfo(DeployHash::from_raw([5; Digest::LENGTH])); - check_to_json( - Some(key_deploy_info), - r#"{"cl_type":{"Option":"Key"},"parsed":"deploy-0505050505050505050505050505050505050505050505050505050505050505"}"#, - ); - - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"Key"},"parsed":null}"#, - ) - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - Some(uref), - r#"{"cl_type":{"Option":"URef"},"parsed":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"URef"},"parsed":null}"#, - ) - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - check_to_json( - Some(PublicKey::from( - &SecretKey::ed25519_from_bytes([7; SecretKey::ED25519_LENGTH]).unwrap(), - )), - r#"{"cl_type":{"Option":"PublicKey"},"parsed":"01ea4a6c63e29c520abef5507b132ec5f9954776aebebe7b92421eea691446d22c"}"#, - ); - check_to_json( - Some(PublicKey::from( - &SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(), - )), - r#"{"cl_type":{"Option":"PublicKey"},"parsed":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}"#, - ); - check_to_json( - Option::::None, - r#"{"cl_type":{"Option":"PublicKey"},"parsed":null}"#, - ) - } - } - - mod result { - use super::*; - use crate::crypto::SecretKey; - - #[test] - fn bool_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"I32"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"U32"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"Unit"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Ok(true), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"String"}},"parsed":{"Ok":true}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"U32"}},"parsed":{"Err":1}}"#, - ); - 
check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Bool","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn i32_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"I32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"U32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"Unit"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"String"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"I32","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"I32","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"I32","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn i64_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"I32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"U32"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"Unit"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Ok(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"String"}},"parsed":{"Ok":-1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"I64","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"I64","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"I64","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u8_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U8","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U8","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U8","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u32_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - 
r#"{"cl_type":{"Result":{"ok":"U32","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U32","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U32","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U32","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u64_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"I32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"U32"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"Unit"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Ok(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"String"}},"parsed":{"Ok":1}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U64","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U64","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U64","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u128_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U128","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U128","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U128","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u256_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - 
r#"{"cl_type":{"Result":{"ok":"U256","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U256","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U256","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn u512_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"I32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"U32"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"Unit"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Ok(1.into()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"String"}},"parsed":{"Ok":"1"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"U512","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"U512","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"U512","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn unit_cl_value_should_encode_to_json() { - check_to_json( - Result::<(), i32>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"I32"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), u32>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"U32"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), ()>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"Unit"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), String>::Ok(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"String"}},"parsed":{"Ok":null}}"#, - ); - check_to_json( - Result::<(), i32>::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::<(), u32>::Err(1), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::<(), ()>::Err(()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::<(), String>::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Unit","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn string_cl_value_should_encode_to_json() { - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"I32"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"U32"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"Unit"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Ok("test string".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"String"}},"parsed":{"Ok":"test string"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"String","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - 
r#"{"cl_type":{"Result":{"ok":"String","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"String","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"String","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn key_cl_value_should_encode_to_json() { - let key = Key::Hash([2; KEY_HASH_LENGTH]); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"I32"}},"parsed":{"Ok":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"U32"}},"parsed":{"Ok":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"Unit"}},"parsed":{"Ok":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - check_to_json( - Result::::Ok(key), - r#"{"cl_type":{"Result":{"ok":"Key","err":"String"}},"parsed":{"Ok":"hash-0202020202020202020202020202020202020202020202020202020202020202"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"Key","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"Key","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"Key","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"Key","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn uref_cl_value_should_encode_to_json() { - let uref = URef::new([6; UREF_ADDR_LENGTH], AccessRights::READ_ADD_WRITE); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"I32"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"U32"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"Unit"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Ok(uref), - r#"{"cl_type":{"Result":{"ok":"URef","err":"String"}},"parsed":{"Ok":"uref-0606060606060606060606060606060606060606060606060606060606060606-007"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"URef","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"URef","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"URef","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"URef","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - - #[test] - fn public_key_cl_value_should_encode_to_json() { - let secret_key = - SecretKey::secp256k1_from_bytes([8; SecretKey::SECP256K1_LENGTH]).unwrap(); - let public_key = PublicKey::from(&secret_key); - check_to_json( - Result::::Ok(public_key.clone()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"I32"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - 
Result::::Ok(public_key.clone()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"U32"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Ok(public_key.clone()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"Unit"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Ok(public_key), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"String"}},"parsed":{"Ok":"0203f991f944d1e1954a7fc8b9bf62e0d78f015f4c07762d505e20e6c45260a3661b"}}"#, - ); - check_to_json( - Result::::Err(-1), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"I32"}},"parsed":{"Err":-1}}"#, - ); - check_to_json( - Result::::Err(1), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"U32"}},"parsed":{"Err":1}}"#, - ); - check_to_json( - Result::::Err(()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"Unit"}},"parsed":{"Err":null}}"#, - ); - check_to_json( - Result::::Err("e".to_string()), - r#"{"cl_type":{"Result":{"ok":"PublicKey","err":"String"}},"parsed":{"Err":"e"}}"#, - ); - } - } -} diff --git a/casper_types_ver_2_0/src/cl_value/jsonrepr.rs b/casper_types_ver_2_0/src/cl_value/jsonrepr.rs deleted file mode 100644 index 1b3b3e28..00000000 --- a/casper_types_ver_2_0/src/cl_value/jsonrepr.rs +++ /dev/null @@ -1,272 +0,0 @@ -use alloc::{string::String, vec, vec::Vec}; - -use serde::Serialize; -use serde_json::{json, Value}; - -use crate::{ - bytesrepr::{self, FromBytes, OPTION_NONE_TAG, OPTION_SOME_TAG, RESULT_ERR_TAG, RESULT_OK_TAG}, - cl_type::CL_TYPE_RECURSION_DEPTH, - CLType, CLValue, Key, PublicKey, URef, U128, U256, U512, -}; - -/// Returns a best-effort attempt to convert the `CLValue` into a meaningful JSON value. -pub fn cl_value_to_json(cl_value: &CLValue) -> Option { - depth_limited_to_json(0, cl_value.cl_type(), cl_value.inner_bytes()).and_then( - |(json_value, remainder)| { - if remainder.is_empty() { - Some(json_value) - } else { - None - } - }, - ) -} - -fn depth_limited_to_json<'a>( - depth: u8, - cl_type: &CLType, - bytes: &'a [u8], -) -> Option<(Value, &'a [u8])> { - if depth >= CL_TYPE_RECURSION_DEPTH { - return None; - } - let depth = depth + 1; - - match cl_type { - CLType::Bool => simple_type_to_json::(bytes), - CLType::I32 => simple_type_to_json::(bytes), - CLType::I64 => simple_type_to_json::(bytes), - CLType::U8 => simple_type_to_json::(bytes), - CLType::U32 => simple_type_to_json::(bytes), - CLType::U64 => simple_type_to_json::(bytes), - CLType::U128 => simple_type_to_json::(bytes), - CLType::U256 => simple_type_to_json::(bytes), - CLType::U512 => simple_type_to_json::(bytes), - CLType::Unit => simple_type_to_json::<()>(bytes), - CLType::String => simple_type_to_json::(bytes), - CLType::Key => simple_type_to_json::(bytes), - CLType::URef => simple_type_to_json::(bytes), - CLType::PublicKey => simple_type_to_json::(bytes), - CLType::Option(inner_cl_type) => { - let (variant, remainder) = u8::from_bytes(bytes).ok()?; - match variant { - OPTION_NONE_TAG => Some((Value::Null, remainder)), - OPTION_SOME_TAG => Some(depth_limited_to_json(depth, inner_cl_type, remainder)?), - _ => None, - } - } - CLType::List(inner_cl_type) => { - let (count, mut stream) = u32::from_bytes(bytes).ok()?; - let mut result: Vec = Vec::new(); - for _ in 0..count { - let (value, remainder) = depth_limited_to_json(depth, inner_cl_type, stream)?; - result.push(value); - stream = remainder; - } - Some((json!(result), stream)) - } - CLType::ByteArray(length) => { - let 
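// Illustrative sketch, not part of the deleted sources: the tag-driven Option decoding handled
// above, written as if it sat alongside this module's tests (cl_value_to_json is crate-internal,
// so the example assumes it is in scope here).
use crate::{CLType, CLValue};
use alloc::{boxed::Box, vec};
use serde_json::{json, Value};

fn option_decoding_example() {
    // Some(true): OPTION_SOME_TAG (1) followed by the payload byte for `true` (1).
    let some = CLValue::from_components(CLType::Option(Box::new(CLType::Bool)), vec![1, 1]);
    assert_eq!(cl_value_to_json(&some), Some(json!(true)));
    // None: OPTION_NONE_TAG (0) alone decodes to JSON null.
    let none = CLValue::from_components(CLType::Option(Box::new(CLType::Bool)), vec![0]);
    assert_eq!(cl_value_to_json(&none), Some(Value::Null));
}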
(bytes, remainder) = bytesrepr::safe_split_at(bytes, *length as usize).ok()?; - let hex_encoded_bytes = base16::encode_lower(&bytes); - Some((json![hex_encoded_bytes], remainder)) - } - CLType::Result { ok, err } => { - let (variant, remainder) = u8::from_bytes(bytes).ok()?; - match variant { - RESULT_ERR_TAG => { - let (value, remainder) = depth_limited_to_json(depth, err, remainder)?; - Some((json!({ "Err": value }), remainder)) - } - RESULT_OK_TAG => { - let (value, remainder) = depth_limited_to_json(depth, ok, remainder)?; - Some((json!({ "Ok": value }), remainder)) - } - _ => None, - } - } - CLType::Map { key, value } => { - let (num_keys, mut stream) = u32::from_bytes(bytes).ok()?; - let mut result: Vec = Vec::new(); - for _ in 0..num_keys { - let (k, remainder) = depth_limited_to_json(depth, key, stream)?; - let (v, remainder) = depth_limited_to_json(depth, value, remainder)?; - result.push(json!({"key": k, "value": v})); - stream = remainder; - } - Some((json!(result), stream)) - } - CLType::Tuple1(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - Some((json!([t1]), remainder)) - } - CLType::Tuple2(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - let (t2, remainder) = depth_limited_to_json(depth, &arr[1], remainder)?; - Some((json!([t1, t2]), remainder)) - } - CLType::Tuple3(arr) => { - let (t1, remainder) = depth_limited_to_json(depth, &arr[0], bytes)?; - let (t2, remainder) = depth_limited_to_json(depth, &arr[1], remainder)?; - let (t3, remainder) = depth_limited_to_json(depth, &arr[2], remainder)?; - Some((json!([t1, t2, t3]), remainder)) - } - CLType::Any => None, - } -} - -fn simple_type_to_json(bytes: &[u8]) -> Option<(Value, &[u8])> { - let (value, remainder) = T::from_bytes(bytes).ok()?; - Some((json!(value), remainder)) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{bytesrepr::ToBytes, AsymmetricType, CLTyped, SecretKey}; - use alloc::collections::BTreeMap; - - fn test_value(value: T) { - let cl_value = CLValue::from_t(value.clone()).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!(value); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn list_of_ints_to_json_value() { - test_value::>(vec![]); - test_value(vec![10u32, 12u32]); - } - - #[test] - fn list_of_bools_to_json_value() { - test_value(vec![true, false]); - } - - #[test] - fn list_of_string_to_json_value() { - test_value(vec!["rust", "python"]); - } - - #[test] - fn list_of_public_keys_to_json_value() { - let a = PublicKey::from( - &SecretKey::secp256k1_from_bytes([3; SecretKey::SECP256K1_LENGTH]).unwrap(), - ); - let b = PublicKey::from( - &SecretKey::ed25519_from_bytes([3; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let a_hex = a.to_hex(); - let b_hex = b.to_hex(); - let cl_value = CLValue::from_t(vec![a, b]).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([a_hex, b_hex]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn list_of_list_of_public_keys_to_json_value() { - let a = PublicKey::from( - &SecretKey::secp256k1_from_bytes([3; SecretKey::SECP256K1_LENGTH]).unwrap(), - ); - let b = PublicKey::from( - &SecretKey::ed25519_from_bytes([3; PublicKey::ED25519_LENGTH]).unwrap(), - ); - let c = PublicKey::from( - &SecretKey::ed25519_from_bytes([6; PublicKey::ED25519_LENGTH]).unwrap(), - ); - let a_hex = a.to_hex(); - let b_hex = b.to_hex(); - let c_hex = c.to_hex(); - let cl_value = 
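// Illustrative sketch, not part of the deleted sources: the Map arm above renders maps as an
// array of {"key", "value"} objects, presumably because CL map keys need not be strings. Written
// as if alongside this module's tests (cl_value_to_json is crate-internal and assumed in scope).
use crate::CLValue;
use alloc::collections::BTreeMap;
use serde_json::json;

fn map_with_non_string_keys_example() {
    let mut map: BTreeMap<u8, bool> = BTreeMap::new();
    map.insert(1, true);
    map.insert(2, false);
    let cl_value = CLValue::from_t(map).expect("from_t");
    let expected = json!([
        { "key": 1, "value": true },
        { "key": 2, "value": false }
    ]);
    assert_eq!(cl_value_to_json(&cl_value), Some(expected));
}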
CLValue::from_t(vec![vec![a, b], vec![c]]).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([[a_hex, b_hex], [c_hex]]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn map_of_string_to_list_of_ints_to_json_value() { - let key1 = String::from("first"); - let key2 = String::from("second"); - let value1 = vec![]; - let value2 = vec![1, 2, 3]; - let mut map: BTreeMap> = BTreeMap::new(); - map.insert(key1.clone(), value1.clone()); - map.insert(key2.clone(), value2.clone()); - let cl_value = CLValue::from_t(map).unwrap(); - let cl_value_as_json: Value = cl_value_to_json(&cl_value).unwrap(); - let expected = json!([ - { "key": key1, "value": value1 }, - { "key": key2, "value": value2 } - ]); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn option_some_of_lists_to_json_value() { - test_value(Some(vec![1, 2, 3])); - } - - #[test] - fn option_none_to_json_value() { - test_value(Option::::None); - } - - #[test] - fn bytes_to_json_value() { - let bytes = [1_u8, 2]; - let cl_value = CLValue::from_t(bytes).unwrap(); - let cl_value_as_json = cl_value_to_json(&cl_value).unwrap(); - let expected = json!(base16::encode_lower(&bytes)); - assert_eq!(cl_value_as_json, expected); - } - - #[test] - fn result_ok_to_json_value() { - test_value(Result::, String>::Ok(vec![1, 2, 3])); - } - - #[test] - fn result_error_to_json_value() { - test_value(Result::, String>::Err(String::from("Upsss"))); - } - - #[test] - fn tuples_to_json_value() { - let v1 = String::from("Hello"); - let v2 = vec![1, 2, 3]; - let v3 = 1u8; - - test_value((v1.clone(),)); - test_value((v1.clone(), v2.clone())); - test_value((v1, v2, v3)); - } - - #[test] - fn json_encoding_nested_tuple_1_value_should_not_stack_overflow() { - // Returns a CLType corresponding to (((...(cl_type,),...),),) nested in tuples to - // `depth_limit`. - fn wrap_in_tuple1(cl_type: CLType, current_depth: usize, depth_limit: usize) -> CLType { - if current_depth == depth_limit { - return cl_type; - } - wrap_in_tuple1( - CLType::Tuple1([Box::new(cl_type)]), - current_depth + 1, - depth_limit, - ) - } - - for depth_limit in &[1, CL_TYPE_RECURSION_DEPTH as usize] { - let cl_type = wrap_in_tuple1(CLType::Unit, 1, *depth_limit); - let cl_value = CLValue::from_components(cl_type, vec![]); - assert!(cl_value_to_json(&cl_value).is_some()); - } - - for depth_limit in &[CL_TYPE_RECURSION_DEPTH as usize + 1, 1000] { - let cl_type = wrap_in_tuple1(CLType::Unit, 1, *depth_limit); - let cl_value = CLValue::from_components(cl_type, vec![]); - assert!(cl_value_to_json(&cl_value).is_none()); - } - } -} diff --git a/casper_types_ver_2_0/src/contract_messages.rs b/casper_types_ver_2_0/src/contract_messages.rs deleted file mode 100644 index 7bf3ccc9..00000000 --- a/casper_types_ver_2_0/src/contract_messages.rs +++ /dev/null @@ -1,228 +0,0 @@ -//! Data types for interacting with contract level messages. 
- -mod error; -mod messages; -mod topics; - -pub use error::FromStrError; -pub use messages::{Message, MessageChecksum, MessagePayload, Messages}; -pub use topics::{ - MessageTopicOperation, MessageTopicSummary, TopicNameHash, TOPIC_NAME_HASH_LENGTH, -}; - -use crate::{ - alloc::string::ToString, - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, AddressableEntityHash, KEY_HASH_LENGTH, -}; - -use core::convert::TryFrom; - -use alloc::{string::String, vec::Vec}; -use core::fmt::{Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -const TOPIC_FORMATTED_STRING_PREFIX: &str = "topic-"; -const MESSAGE_ADDR_PREFIX: &str = "message-"; - -/// MessageTopicAddr -#[derive(PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct MessageAddr { - /// The entity addr. - entity_addr: AddressableEntityHash, - /// The hash of the name of the message topic. - topic_name_hash: TopicNameHash, - /// The message index. - message_index: Option, -} - -impl MessageAddr { - /// Constructs a new topic address based on the addressable entity addr and the hash of the - /// message topic name. - pub const fn new_topic_addr( - entity_addr: AddressableEntityHash, - topic_name_hash: TopicNameHash, - ) -> Self { - Self { - entity_addr, - topic_name_hash, - message_index: None, - } - } - - /// Constructs a new message address based on the addressable entity addr, the hash of the - /// message topic name and the message index in the topic. - pub const fn new_message_addr( - entity_addr: AddressableEntityHash, - topic_name_hash: TopicNameHash, - message_index: u32, - ) -> Self { - Self { - entity_addr, - topic_name_hash, - message_index: Some(message_index), - } - } - - /// Formats the [`MessageAddr`] as a prefixed, hex-encoded string. - pub fn to_formatted_string(self) -> String { - match self.message_index { - Some(index) => { - format!( - "{}{}-{}-{:x}", - MESSAGE_ADDR_PREFIX, - base16::encode_lower(&self.entity_addr), - self.topic_name_hash.to_formatted_string(), - index, - ) - } - None => { - format!( - "{}{}{}-{}", - MESSAGE_ADDR_PREFIX, - TOPIC_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.entity_addr), - self.topic_name_hash.to_formatted_string(), - ) - } - } - } - - /// Parses a formatted string into a [`MessageAddr`]. 
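// Illustrative sketch, not part of the deleted sources: the two formatted-string shapes produced
// by MessageAddr::to_formatted_string above. Crate paths and the AddressableEntityHash
// constructor are assumptions, mirroring this file's own tests.
use casper_types::{
    contract_messages::{MessageAddr, TopicNameHash, TOPIC_NAME_HASH_LENGTH},
    AddressableEntityHash,
};

fn message_addr_formatted_strings() {
    let entity = AddressableEntityHash::new([1; 32]);
    let topic_name_hash = TopicNameHash::new([2; TOPIC_NAME_HASH_LENGTH]);

    // Topic record: "message-topic-<entity-hex>-topic-name-<topic-hex>".
    let topic_addr = MessageAddr::new_topic_addr(entity, topic_name_hash);
    assert!(topic_addr.to_formatted_string().starts_with("message-topic-0101"));

    // Individual message: "message-<entity-hex>-topic-name-<topic-hex>-<index-hex>".
    let message_addr = MessageAddr::new_message_addr(entity, topic_name_hash, 15);
    assert!(message_addr.to_formatted_string().ends_with("-f"));
}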
- pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(MESSAGE_ADDR_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - - let (remainder, message_index) = match remainder.strip_prefix(TOPIC_FORMATTED_STRING_PREFIX) - { - Some(topic_string) => (topic_string, None), - None => { - let (remainder, message_index_str) = remainder - .rsplit_once('-') - .ok_or(FromStrError::MissingMessageIndex)?; - (remainder, Some(u32::from_str_radix(message_index_str, 16)?)) - } - }; - - let (entity_addr_str, topic_name_hash_str) = remainder - .split_once('-') - .ok_or(FromStrError::MissingMessageIndex)?; - - let bytes = checksummed_hex::decode(entity_addr_str)?; - let entity_addr = ::try_from(bytes[0..KEY_HASH_LENGTH].as_ref()) - .map_err(|err| FromStrError::EntityHashParseError(err.to_string()))?; - - let topic_name_hash = TopicNameHash::from_formatted_str(topic_name_hash_str)?; - Ok(MessageAddr { - entity_addr, - topic_name_hash, - message_index, - }) - } - - /// Returns the entity addr of this message topic. - pub fn entity_addr(&self) -> AddressableEntityHash { - self.entity_addr - } -} - -impl Display for MessageAddr { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - match self.message_index { - Some(index) => { - write!( - f, - "{}-{}-{:x}", - base16::encode_lower(&self.entity_addr), - self.topic_name_hash, - index, - ) - } - None => { - write!( - f, - "{}-{}", - base16::encode_lower(&self.entity_addr), - self.topic_name_hash, - ) - } - } - } -} - -impl ToBytes for MessageAddr { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.append(&mut self.entity_addr.to_bytes()?); - buffer.append(&mut self.topic_name_hash.to_bytes()?); - buffer.append(&mut self.message_index.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.entity_addr.serialized_length() - + self.topic_name_hash.serialized_length() - + self.message_index.serialized_length() - } -} - -impl FromBytes for MessageAddr { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (entity_addr, rem) = FromBytes::from_bytes(bytes)?; - let (topic_hash, rem) = FromBytes::from_bytes(rem)?; - let (message_index, rem) = FromBytes::from_bytes(rem)?; - Ok(( - MessageAddr { - entity_addr, - topic_name_hash: topic_hash, - message_index, - }, - rem, - )) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> MessageAddr { - MessageAddr { - entity_addr: rng.gen(), - topic_name_hash: rng.gen(), - message_index: rng.gen(), - } - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, KEY_HASH_LENGTH}; - - use super::{topics::TOPIC_NAME_HASH_LENGTH, *}; - - #[test] - fn serialization_roundtrip() { - let topic_addr = MessageAddr::new_topic_addr( - [1; KEY_HASH_LENGTH].into(), - [2; TOPIC_NAME_HASH_LENGTH].into(), - ); - bytesrepr::test_serialization_roundtrip(&topic_addr); - - let message_addr = MessageAddr::new_message_addr( - [1; KEY_HASH_LENGTH].into(), - [2; TOPIC_NAME_HASH_LENGTH].into(), - 3, - ); - bytesrepr::test_serialization_roundtrip(&message_addr); - } -} diff --git a/casper_types_ver_2_0/src/contract_messages/error.rs b/casper_types_ver_2_0/src/contract_messages/error.rs deleted file mode 100644 index ba7f2cd3..00000000 --- a/casper_types_ver_2_0/src/contract_messages/error.rs +++ /dev/null @@ -1,74 +0,0 @@ -use core::array::TryFromSliceError; - -use alloc::string::String; -use core::{ - fmt::{self, Debug, Display, Formatter}, - num::ParseIntError, -}; - -/// Error 
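// Illustrative sketch, not part of the deleted sources: a formatted string emitted above parses
// back into an equal MessageAddr via from_formatted_str (crate paths assumed).
use casper_types::contract_messages::MessageAddr;

fn message_addr_string_roundtrip() {
    let addr = MessageAddr::new_message_addr([7u8; 32].into(), [9u8; 32].into(), 3);
    let text = addr.to_formatted_string();
    let parsed = MessageAddr::from_formatted_str(&text).expect("formatted string should parse");
    assert_eq!(parsed, addr);
}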
while parsing message hashes from string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// The prefix is invalid. - InvalidPrefix, - /// No message index at the end of the string. - MissingMessageIndex, - /// String not formatted correctly. - Formatting, - /// Cannot parse entity hash. - EntityHashParseError(String), - /// Cannot parse message topic hash. - MessageTopicParseError(String), - /// Failed to decode address portion of URef. - Hex(base16::DecodeError), - /// Failed to parse an int. - Int(ParseIntError), - /// The slice is the wrong length. - Length(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: ParseIntError) -> Self { - FromStrError::Int(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Length(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => { - write!(f, "prefix is invalid") - } - FromStrError::MissingMessageIndex => { - write!(f, "no message index found at the end of the string") - } - FromStrError::Formatting => { - write!(f, "string not properly formatted") - } - FromStrError::EntityHashParseError(err) => { - write!(f, "could not parse entity hash: {}", err) - } - FromStrError::MessageTopicParseError(err) => { - write!(f, "could not parse topic hash: {}", err) - } - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Int(error) => write!(f, "failed to parse an int: {}", error), - FromStrError::Length(error) => write!(f, "address portion is wrong length: {}", error), - } - } -} diff --git a/casper_types_ver_2_0/src/contract_messages/messages.rs b/casper_types_ver_2_0/src/contract_messages/messages.rs deleted file mode 100644 index 0f229e6d..00000000 --- a/casper_types_ver_2_0/src/contract_messages/messages.rs +++ /dev/null @@ -1,323 +0,0 @@ -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - checksummed_hex, AddressableEntityHash, Key, -}; - -use alloc::{string::String, vec::Vec}; -use core::{convert::TryFrom, fmt::Debug}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Alphanumeric, DistString, Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use super::{FromStrError, TopicNameHash}; - -/// Collection of multiple messages. -pub type Messages = Vec; - -/// The length of a message digest -pub const MESSAGE_CHECKSUM_LENGTH: usize = 32; - -const MESSAGE_CHECKSUM_STRING_PREFIX: &str = "message-checksum-"; - -/// A newtype wrapping an array which contains the raw bytes of -/// the hash of the message emitted. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Message checksum as a formatted string.") -)] -pub struct MessageChecksum( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] - pub [u8; MESSAGE_CHECKSUM_LENGTH], -); - -impl MessageChecksum { - /// Returns inner value of the message checksum. 
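// Illustrative sketch, not part of the deleted sources: how the FromStrError surface defined
// above shows up at a call site (crate paths assumed).
use casper_types::contract_messages::{FromStrError, TopicNameHash};

fn bad_prefix_is_rejected() {
    match TopicNameHash::from_formatted_str("not-a-topic-name-hash") {
        Err(FromStrError::InvalidPrefix) => (),
        other => panic!("expected InvalidPrefix, got {:?}", other),
    }
}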
- pub fn value(&self) -> [u8; MESSAGE_CHECKSUM_LENGTH] { - self.0 - } - - /// Formats the `MessageChecksum` as a human readable string. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - MESSAGE_CHECKSUM_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `MessageChecksum`. - pub fn from_formatted_str(input: &str) -> Result { - let hex_addr = input - .strip_prefix(MESSAGE_CHECKSUM_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - - let bytes = - <[u8; MESSAGE_CHECKSUM_LENGTH]>::try_from(checksummed_hex::decode(hex_addr)?.as_ref())?; - Ok(MessageChecksum(bytes)) - } -} - -impl ToBytes for MessageChecksum { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.append(&mut self.0.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for MessageChecksum { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (checksum, rem) = FromBytes::from_bytes(bytes)?; - Ok((MessageChecksum(checksum), rem)) - } -} - -impl Serialize for MessageChecksum { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for MessageChecksum { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - MessageChecksum::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; MESSAGE_CHECKSUM_LENGTH]>::deserialize(deserializer)?; - Ok(MessageChecksum(bytes)) - } - } -} - -const MESSAGE_PAYLOAD_TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for a message payload that contains a human readable string. -pub const MESSAGE_PAYLOAD_STRING_TAG: u8 = 0; - -/// The payload of the message emitted by an addressable entity during execution. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum MessagePayload { - /// Human readable string message. - String(String), -} - -impl From for MessagePayload -where - T: Into, -{ - fn from(value: T) -> Self { - Self::String(value.into()) - } -} - -impl ToBytes for MessagePayload { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - MessagePayload::String(message_string) => { - buffer.insert(0, MESSAGE_PAYLOAD_STRING_TAG); - buffer.extend(message_string.to_bytes()?); - } - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - MESSAGE_PAYLOAD_TAG_LENGTH - + match self { - MessagePayload::String(message_string) => message_string.serialized_length(), - } - } -} - -impl FromBytes for MessagePayload { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - MESSAGE_PAYLOAD_STRING_TAG => { - let (message, remainder): (String, _) = FromBytes::from_bytes(remainder)?; - Ok((Self::String(message), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -/// Message that was emitted by an addressable entity during execution. 
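// Illustrative sketch, not part of the deleted sources: the checksum string format and the
// blanket Into<String> conversion defined above (crate paths assumed).
use casper_types::contract_messages::{MessageChecksum, MessagePayload};

fn checksum_and_payload_examples() {
    let checksum = MessageChecksum([0xab; 32]);
    // "message-checksum-" followed by the lowercase hex of the 32 checksum bytes.
    let text = checksum.to_formatted_string();
    assert!(text.starts_with("message-checksum-abab"));
    assert_eq!(MessageChecksum::from_formatted_str(&text).unwrap(), checksum);

    // Anything Into<String> becomes the String variant of MessagePayload.
    let payload: MessagePayload = "token minted".into();
    assert_eq!(payload, MessagePayload::String("token minted".to_string()));
}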
-#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Message { - /// The identity of the entity that produced the message. - entity_addr: AddressableEntityHash, - /// The payload of the message. - message: MessagePayload, - /// The name of the topic on which the message was emitted on. - topic_name: String, - /// The hash of the name of the topic. - topic_name_hash: TopicNameHash, - /// Message index in the topic. - index: u32, -} - -impl Message { - /// Creates new instance of [`Message`] with the specified source and message payload. - pub fn new( - source: AddressableEntityHash, - message: MessagePayload, - topic_name: String, - topic_name_hash: TopicNameHash, - index: u32, - ) -> Self { - Self { - entity_addr: source, - message, - topic_name, - topic_name_hash, - index, - } - } - - /// Returns a reference to the identity of the entity that produced the message. - pub fn entity_addr(&self) -> &AddressableEntityHash { - &self.entity_addr - } - - /// Returns a reference to the payload of the message. - pub fn payload(&self) -> &MessagePayload { - &self.message - } - - /// Returns a reference to the name of the topic on which the message was emitted on. - pub fn topic_name(&self) -> &String { - &self.topic_name - } - - /// Returns a reference to the hash of the name of the topic. - pub fn topic_name_hash(&self) -> &TopicNameHash { - &self.topic_name_hash - } - - /// Returns the index of the message in the topic. - pub fn index(&self) -> u32 { - self.index - } - - /// Returns a new [`Key::Message`] based on the information in the message. - /// This key can be used to query the checksum record for the message in global state. - pub fn message_key(&self) -> Key { - Key::message(self.entity_addr, self.topic_name_hash, self.index) - } - - /// Returns a new [`Key::Message`] based on the information in the message. - /// This key can be used to query the control record for the topic of this message in global - /// state. 
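// Illustrative sketch, not part of the deleted sources: constructing a Message and deriving the
// global-state keys for it with the accessors above and below (crate paths assumed; in practice
// the topic name hash is derived from the topic name rather than a fixed array).
use casper_types::contract_messages::{Message, MessagePayload, TopicNameHash};

fn build_message_and_keys() {
    let message = Message::new(
        [1u8; 32].into(),
        MessagePayload::from("token minted"),
        "events".to_string(),
        TopicNameHash::new([2; 32]),
        0,
    );
    // message_key() addresses this message's checksum record in global state,
    // topic_key() the control record for the whole topic.
    let _message_key = message.message_key();
    let _topic_key = message.topic_key();
    assert_eq!(message.index(), 0);
}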
- pub fn topic_key(&self) -> Key { - Key::message_topic(self.entity_addr, self.topic_name_hash) - } -} - -impl ToBytes for Message { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.append(&mut self.entity_addr.to_bytes()?); - buffer.append(&mut self.message.to_bytes()?); - buffer.append(&mut self.topic_name.to_bytes()?); - buffer.append(&mut self.topic_name_hash.to_bytes()?); - buffer.append(&mut self.index.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.entity_addr.serialized_length() - + self.message.serialized_length() - + self.topic_name.serialized_length() - + self.topic_name_hash.serialized_length() - + self.index.serialized_length() - } -} - -impl FromBytes for Message { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (entity_addr, rem) = FromBytes::from_bytes(bytes)?; - let (message, rem) = FromBytes::from_bytes(rem)?; - let (topic_name, rem) = FromBytes::from_bytes(rem)?; - let (topic_name_hash, rem) = FromBytes::from_bytes(rem)?; - let (index, rem) = FromBytes::from_bytes(rem)?; - Ok(( - Message { - entity_addr, - message, - topic_name, - topic_name_hash, - index, - }, - rem, - )) - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Message { - let topic_name = Alphanumeric.sample_string(rng, 32); - let topic_name_hash = crate::crypto::blake2b(&topic_name).into(); - let message = Alphanumeric.sample_string(rng, 64).into(); - - Message { - entity_addr: rng.gen(), - message, - topic_name, - topic_name_hash, - index: rng.gen(), - } - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, contract_messages::topics::TOPIC_NAME_HASH_LENGTH, KEY_HASH_LENGTH}; - - use super::*; - - #[test] - fn serialization_roundtrip() { - let message_checksum = MessageChecksum([1; MESSAGE_CHECKSUM_LENGTH]); - bytesrepr::test_serialization_roundtrip(&message_checksum); - - let message_payload = "message payload".into(); - bytesrepr::test_serialization_roundtrip(&message_payload); - - let message = Message::new( - [1; KEY_HASH_LENGTH].into(), - message_payload, - "test_topic".to_string(), - TopicNameHash::new([0x4du8; TOPIC_NAME_HASH_LENGTH]), - 10, - ); - bytesrepr::test_serialization_roundtrip(&message); - } -} diff --git a/casper_types_ver_2_0/src/contract_messages/topics.rs b/casper_types_ver_2_0/src/contract_messages/topics.rs deleted file mode 100644 index 9a41d3e3..00000000 --- a/casper_types_ver_2_0/src/contract_messages/topics.rs +++ /dev/null @@ -1,254 +0,0 @@ -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, BlockTime, -}; - -use core::convert::TryFrom; - -use alloc::{string::String, vec::Vec}; -use core::fmt::{Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use super::error::FromStrError; - -/// The length in bytes of a topic name hash. -pub const TOPIC_NAME_HASH_LENGTH: usize = 32; -const MESSAGE_TOPIC_NAME_HASH: &str = "topic-name-"; - -/// The hash of the name of the message topic. 
-#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "The hash of the name of the message topic.") -)] -pub struct TopicNameHash( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] - pub [u8; TOPIC_NAME_HASH_LENGTH], -); - -impl TopicNameHash { - /// Returns a new [`TopicNameHash`] based on the specified value. - pub const fn new(topic_name_hash: [u8; TOPIC_NAME_HASH_LENGTH]) -> TopicNameHash { - TopicNameHash(topic_name_hash) - } - - /// Returns inner value of the topic hash. - pub fn value(&self) -> [u8; TOPIC_NAME_HASH_LENGTH] { - self.0 - } - - /// Formats the [`TopicNameHash`] as a prefixed, hex-encoded string. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - MESSAGE_TOPIC_NAME_HASH, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a [`TopicNameHash`]. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(MESSAGE_TOPIC_NAME_HASH) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = - <[u8; TOPIC_NAME_HASH_LENGTH]>::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(TopicNameHash(bytes)) - } -} - -impl ToBytes for TopicNameHash { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.append(&mut self.0.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for TopicNameHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (hash, rem) = FromBytes::from_bytes(bytes)?; - Ok((TopicNameHash(hash), rem)) - } -} - -impl Serialize for TopicNameHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for TopicNameHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - TopicNameHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; TOPIC_NAME_HASH_LENGTH]>::deserialize(deserializer)?; - Ok(TopicNameHash(bytes)) - } - } -} - -impl Display for TopicNameHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for TopicNameHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "MessageTopicHash({})", base16::encode_lower(&self.0)) - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> TopicNameHash { - TopicNameHash(rng.gen()) - } -} - -impl From<[u8; TOPIC_NAME_HASH_LENGTH]> for TopicNameHash { - fn from(value: [u8; TOPIC_NAME_HASH_LENGTH]) -> Self { - TopicNameHash(value) - } -} - -/// Summary of a message topic that will be stored in global state. -#[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct MessageTopicSummary { - /// Number of messages in this topic. - pub(crate) message_count: u32, - /// Block timestamp in which these messages were emitted. 
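// Illustrative sketch, not part of the deleted sources: the string forms defined above. Display
// prints bare lowercase hex, the formatted string uses the "topic-name-" prefix, and
// human-readable serde (serde_json assumed available) uses the prefixed form.
use casper_types::contract_messages::{TopicNameHash, TOPIC_NAME_HASH_LENGTH};

fn topic_name_hash_strings() {
    let hash = TopicNameHash::new([0x4d; TOPIC_NAME_HASH_LENGTH]);
    let formatted = hash.to_formatted_string();
    assert!(formatted.starts_with("topic-name-4d4d"));
    assert_eq!(TopicNameHash::from_formatted_str(&formatted).unwrap(), hash);
    assert_eq!(format!("{}", hash), "4d".repeat(32));
    assert_eq!(
        serde_json::to_string(&hash).unwrap(),
        format!("\"{}\"", formatted)
    );
}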
-    pub(crate) blocktime: BlockTime,
-}
-
-impl MessageTopicSummary {
-    /// Creates a new topic summary.
-    pub fn new(message_count: u32, blocktime: BlockTime) -> Self {
-        Self {
-            message_count,
-            blocktime,
-        }
-    }
-
-    /// Returns the number of messages that were sent on this topic.
-    pub fn message_count(&self) -> u32 {
-        self.message_count
-    }
-
-    /// Returns the block time.
-    pub fn blocktime(&self) -> BlockTime {
-        self.blocktime
-    }
-}
-
-impl ToBytes for MessageTopicSummary {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut buffer = bytesrepr::allocate_buffer(self)?;
-        buffer.append(&mut self.message_count.to_bytes()?);
-        buffer.append(&mut self.blocktime.to_bytes()?);
-        Ok(buffer)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.message_count.serialized_length() + self.blocktime.serialized_length()
-    }
-}
-
-impl FromBytes for MessageTopicSummary {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (message_count, rem) = FromBytes::from_bytes(bytes)?;
-        let (blocktime, rem) = FromBytes::from_bytes(rem)?;
-        Ok((
-            MessageTopicSummary {
-                message_count,
-                blocktime,
-            },
-            rem,
-        ))
-    }
-}
-
-const TOPIC_OPERATION_ADD_TAG: u8 = 0;
-const OPERATION_MAX_SERIALIZED_LEN: usize = 1;
-
-/// Operations that can be performed on message topics.
-#[derive(Debug, PartialEq)]
-pub enum MessageTopicOperation {
-    /// Add a new message topic.
-    Add,
-}
-
-impl MessageTopicOperation {
-    /// Maximum serialized length of a message topic operation.
-    pub const fn max_serialized_len() -> usize {
-        OPERATION_MAX_SERIALIZED_LEN
-    }
-}
-
-impl ToBytes for MessageTopicOperation {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut buffer = bytesrepr::allocate_buffer(self)?;
-        match self {
-            MessageTopicOperation::Add => buffer.push(TOPIC_OPERATION_ADD_TAG),
-        }
-        Ok(buffer)
-    }
-
-    fn serialized_length(&self) -> usize {
-        match self {
-            MessageTopicOperation::Add => 1,
-        }
-    }
-}
-
-impl FromBytes for MessageTopicOperation {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?;
-        match tag {
-            TOPIC_OPERATION_ADD_TAG => Ok((MessageTopicOperation::Add, remainder)),
-            _ => Err(bytesrepr::Error::Formatting),
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::bytesrepr;
-
-    use super::*;
-
-    #[test]
-    fn serialization_roundtrip() {
-        let topic_name_hash = TopicNameHash::new([0x4du8; TOPIC_NAME_HASH_LENGTH]);
-        bytesrepr::test_serialization_roundtrip(&topic_name_hash);
-
-        let topic_summary = MessageTopicSummary::new(10, BlockTime::new(100));
-        bytesrepr::test_serialization_roundtrip(&topic_summary);
-
-        let topic_operation = MessageTopicOperation::Add;
-        bytesrepr::test_serialization_roundtrip(&topic_operation);
-    }
-}
diff --git a/casper_types_ver_2_0/src/contract_wasm.rs b/casper_types_ver_2_0/src/contract_wasm.rs
deleted file mode 100644
index 57019cde..00000000
--- a/casper_types_ver_2_0/src/contract_wasm.rs
+++ /dev/null
@@ -1,373 +0,0 @@
-use alloc::{format, string::String, vec::Vec};
-use core::{
-    array::TryFromSliceError,
-    convert::TryFrom,
-    fmt::{self, Debug, Display, Formatter},
-};
-
-#[cfg(feature = "datasize")]
-use datasize::DataSize;
-#[cfg(feature = "json-schema")]
-use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema};
-use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer};
-
-use crate::{
-    account,
-    addressable_entity::TryFromSliceForAccountHashError,
-    bytesrepr::{Bytes, Error, FromBytes, ToBytes},
- checksummed_hex, uref, ByteCode, ByteCodeKind, CLType, CLTyped, HashAddr, -}; - -const CONTRACT_WASM_MAX_DISPLAY_LEN: usize = 16; -const KEY_HASH_LENGTH: usize = 32; -const WASM_STRING_PREFIX: &str = "contract-wasm-"; - -/// Associated error type of `TryFrom<&[u8]>` for `ContractWasmHash`. -#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - InvalidPrefix, - Hex(base16::DecodeError), - Account(TryFromSliceForAccountHashError), - Hash(TryFromSliceError), - AccountHash(account::FromStrError), - URef(uref::FromStrError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceForAccountHashError) -> Self { - FromStrError::Account(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From for FromStrError { - fn from(error: account::FromStrError) -> Self { - FromStrError::AccountHash(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Account(error) => write!(f, "account from string error: {:?}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::AccountHash(error) => { - write!(f, "account hash from string error: {:?}", error) - } - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - } - } -} - -/// A newtype wrapping a `HashAddr` which is the raw bytes of -/// the ContractWasmHash -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractWasmHash(HashAddr); - -impl ContractWasmHash { - /// Constructs a new `ContractWasmHash` from the raw bytes of the contract wasm hash. - pub const fn new(value: HashAddr) -> ContractWasmHash { - ContractWasmHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractWasmHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", WASM_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractWasmHash`. 
- pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(WASM_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = HashAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(ContractWasmHash(bytes)) - } -} - -impl Display for ContractWasmHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractWasmHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractWasmHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractWasmHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractWasmHash { - #[inline(always)] - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractWasmHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractWasmHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractWasmHash { - fn from(bytes: [u8; 32]) -> Self { - ContractWasmHash(bytes) - } -} - -impl Serialize for ContractWasmHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractWasmHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractWasmHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractWasmHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractWasmHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractWasmHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractWasmHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractWasmHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractWasmHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractWasmHash { - fn schema_name() -> String { - String::from("ContractWasmHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = - Some("The hash address of the contract wasm".to_string()); - schema_object.into() - } -} - -/// A container for contract's WASM bytes. 
-#[derive(PartialEq, Eq, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractWasm { - bytes: Bytes, -} - -impl ContractWasm { - #[cfg(test)] - pub fn new(bytes: Vec) -> Self { - Self { - bytes: bytes.into(), - } - } - - fn take_bytes(self) -> Vec { - self.bytes.into() - } -} - -impl Debug for ContractWasm { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - if self.bytes.len() > CONTRACT_WASM_MAX_DISPLAY_LEN { - write!( - f, - "ContractWasm(0x{}...)", - base16::encode_lower(&self.bytes[..CONTRACT_WASM_MAX_DISPLAY_LEN]) - ) - } else { - write!(f, "ContractWasm(0x{})", base16::encode_lower(&self.bytes)) - } - } -} - -impl ToBytes for ContractWasm { - fn to_bytes(&self) -> Result, Error> { - self.bytes.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.bytes.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - self.bytes.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractWasm { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (bytes, rem1) = FromBytes::from_bytes(bytes)?; - Ok((ContractWasm { bytes }, rem1)) - } -} - -impl From for ByteCode { - fn from(value: ContractWasm) -> Self { - ByteCode::new(ByteCodeKind::V1CasperWasm, value.take_bytes()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn test_debug_repr_of_short_wasm() { - const SIZE: usize = 8; - let wasm_bytes = vec![0; SIZE]; - let contract_wasm = ContractWasm::new(wasm_bytes); - // String output is less than the bytes itself - assert_eq!( - format!("{:?}", contract_wasm), - "ContractWasm(0x0000000000000000)" - ); - } - - #[test] - fn test_debug_repr_of_long_wasm() { - const SIZE: usize = 65; - let wasm_bytes = vec![0; SIZE]; - let contract_wasm = ContractWasm::new(wasm_bytes); - // String output is less than the bytes itself - assert_eq!( - format!("{:?}", contract_wasm), - "ContractWasm(0x00000000000000000000000000000000...)" - ); - } - - #[test] - fn contract_wasm_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = - HashAddr::try_from(&bytes[..]).expect("should create contract wasm hash"); - let contract_hash = ContractWasmHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_wasm_hash_from_str() { - let contract_hash = ContractWasmHash([3; 32]); - let encoded = contract_hash.to_formatted_string(); - let decoded = ContractWasmHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_hash, decoded); - - let invalid_prefix = - "contractwasm-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = - "contract-wasm-00000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractWasmHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "contract-wasm-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(ContractWasmHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn contract_wasm_hash_serde_roundtrip() { - let contract_hash = ContractWasmHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = 
bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_wasm_hash_json_roundtrip() { - let contract_hash = ContractWasmHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } -} diff --git a/casper_types_ver_2_0/src/contracts.rs b/casper_types_ver_2_0/src/contracts.rs deleted file mode 100644 index 02df4fc5..00000000 --- a/casper_types_ver_2_0/src/contracts.rs +++ /dev/null @@ -1,1308 +0,0 @@ -//! Data types for supporting contract headers feature. -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::{ - collections::{BTreeMap, BTreeSet}, - format, - string::String, - vec::Vec, -}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account, - addressable_entity::{NamedKeys, TryFromSliceForAccountHashError}, - bytesrepr::{self, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}, - checksummed_hex, - contract_wasm::ContractWasmHash, - package::{PackageKind, PackageStatus}, - uref, - uref::URef, - AddressableEntityHash, CLType, CLTyped, EntityVersionKey, EntryPoint, EntryPoints, Groups, - HashAddr, Key, Package, ProtocolVersion, KEY_HASH_LENGTH, -}; - -/// Maximum number of distinct user groups. -pub const MAX_GROUPS: u8 = 10; -/// Maximum number of URefs which can be assigned across all user groups. -pub const MAX_TOTAL_UREFS: usize = 100; - -const CONTRACT_STRING_PREFIX: &str = "contract-"; -const PACKAGE_STRING_PREFIX: &str = "contract-package-"; -// We need to support the legacy prefix of "contract-package-wasm". -const PACKAGE_STRING_LEGACY_EXTRA_PREFIX: &str = "wasm"; - -/// Set of errors which may happen when working with contract headers. -#[derive(Debug, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Attempt to override an existing or previously existing version with a - /// new header (this is not allowed to ensure immutability of a given - /// version). - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(1, Error::PreviouslyUsedVersion as u8); - /// ``` - PreviouslyUsedVersion = 1, - /// Attempted to disable a contract that does not exist. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(2, Error::ContractNotFound as u8); - /// ``` - ContractNotFound = 2, - /// Attempted to create a user group which already exists (use the update - /// function to change an existing user group). - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(3, Error::GroupAlreadyExists as u8); - /// ``` - GroupAlreadyExists = 3, - /// Attempted to add a new user group which exceeds the allowed maximum - /// number of groups. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(4, Error::MaxGroupsExceeded as u8); - /// ``` - MaxGroupsExceeded = 4, - /// Attempted to add a new URef to a group, which resulted in the total - /// number of URefs across all user groups to exceed the allowed maximum. 
- /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(5, Error::MaxTotalURefsExceeded as u8); - /// ``` - MaxTotalURefsExceeded = 5, - /// Attempted to remove a URef from a group, which does not exist in the - /// group. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(6, Error::GroupDoesNotExist as u8); - /// ``` - GroupDoesNotExist = 6, - /// Attempted to remove unknown URef from the group. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(7, Error::UnableToRemoveURef as u8); - /// ``` - UnableToRemoveURef = 7, - /// Group is use by at least one active contract. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(8, Error::GroupInUse as u8); - /// ``` - GroupInUse = 8, - /// URef already exists in given group. - /// ``` - /// # use casper_types_ver_2_0::contracts::Error; - /// assert_eq!(9, Error::URefAlreadyExists as u8); - /// ``` - URefAlreadyExists = 9, -} - -impl TryFrom for Error { - type Error = (); - - fn try_from(value: u8) -> Result { - let error = match value { - v if v == Self::PreviouslyUsedVersion as u8 => Self::PreviouslyUsedVersion, - v if v == Self::ContractNotFound as u8 => Self::ContractNotFound, - v if v == Self::GroupAlreadyExists as u8 => Self::GroupAlreadyExists, - v if v == Self::MaxGroupsExceeded as u8 => Self::MaxGroupsExceeded, - v if v == Self::MaxTotalURefsExceeded as u8 => Self::MaxTotalURefsExceeded, - v if v == Self::GroupDoesNotExist as u8 => Self::GroupDoesNotExist, - v if v == Self::UnableToRemoveURef as u8 => Self::UnableToRemoveURef, - v if v == Self::GroupInUse as u8 => Self::GroupInUse, - v if v == Self::URefAlreadyExists as u8 => Self::URefAlreadyExists, - _ => return Err(()), - }; - Ok(error) - } -} - -/// Associated error type of `TryFrom<&[u8]>` for `ContractHash`. -#[derive(Debug)] -pub struct TryFromSliceForContractHashError(()); - -impl Display for TryFromSliceForContractHashError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "failed to retrieve from slice") - } -} - -/// An error from parsing a formatted contract string -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Invalid formatted string prefix. - InvalidPrefix, - /// Error when decoding a hex string - Hex(base16::DecodeError), - /// Error when parsing an account - Account(TryFromSliceForAccountHashError), - /// Error when parsing the hash. - Hash(TryFromSliceError), - /// Error when parsing an account hash. - AccountHash(account::FromStrError), - /// Error when parsing an uref. 
- URef(uref::FromStrError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceForAccountHashError) -> Self { - FromStrError::Account(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Hash(error) - } -} - -impl From for FromStrError { - fn from(error: account::FromStrError) -> Self { - FromStrError::AccountHash(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "invalid prefix"), - FromStrError::Hex(error) => write!(f, "decode from hex: {}", error), - FromStrError::Account(error) => write!(f, "account from string error: {:?}", error), - FromStrError::Hash(error) => write!(f, "hash from string error: {}", error), - FromStrError::AccountHash(error) => { - write!(f, "account hash from string error: {:?}", error) - } - FromStrError::URef(error) => write!(f, "uref from string error: {:?}", error), - } - } -} - -/// Automatically incremented value for a contract version within a major `ProtocolVersion`. -pub type ContractVersion = u32; - -/// Within each discrete major `ProtocolVersion`, contract version resets to this value. -pub const CONTRACT_INITIAL_VERSION: ContractVersion = 1; - -/// Major element of `ProtocolVersion` a `ContractVersion` is compatible with. -pub type ProtocolVersionMajor = u32; - -/// Major element of `ProtocolVersion` combined with `ContractVersion`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractVersionKey(ProtocolVersionMajor, ContractVersion); - -impl ContractVersionKey { - /// Returns a new instance of ContractVersionKey with provided values. - pub fn new( - protocol_version_major: ProtocolVersionMajor, - contract_version: ContractVersion, - ) -> Self { - Self(protocol_version_major, contract_version) - } - - /// Returns the major element of the protocol version this contract is compatible with. - pub fn protocol_version_major(self) -> ProtocolVersionMajor { - self.0 - } - - /// Returns the contract version within the protocol major version. - pub fn contract_version(self) -> ContractVersion { - self.1 - } -} - -impl From for (ProtocolVersionMajor, ContractVersion) { - fn from(contract_version_key: ContractVersionKey) -> Self { - (contract_version_key.0, contract_version_key.1) - } -} - -/// Serialized length of `ContractVersionKey`. 
-pub const CONTRACT_VERSION_KEY_SERIALIZED_LENGTH: usize = - U32_SERIALIZED_LENGTH + U32_SERIALIZED_LENGTH; - -impl ToBytes for ContractVersionKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.0.to_bytes()?); - ret.append(&mut self.1.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - CONTRACT_VERSION_KEY_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - self.1.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractVersionKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (major, rem): (u32, &[u8]) = FromBytes::from_bytes(bytes)?; - let (contract, rem): (ContractVersion, &[u8]) = FromBytes::from_bytes(rem)?; - Ok((ContractVersionKey::new(major, contract), rem)) - } -} - -impl fmt::Display for ContractVersionKey { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}.{}", self.0, self.1) - } -} - -/// Collection of contract versions. -pub type ContractVersions = BTreeMap; - -/// Collection of disabled contract versions. The runtime will not permit disabled -/// contract versions to be executed. -pub type DisabledVersions = BTreeSet; - -/// A newtype wrapping a `HashAddr` which references a [`Contract`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractHash(HashAddr); - -impl ContractHash { - /// Constructs a new `ContractHash` from the raw bytes of the contract hash. - pub const fn new(value: HashAddr) -> ContractHash { - ContractHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - CONTRACT_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractHash`. 
- pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(CONTRACT_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = HashAddr::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(ContractHash(bytes)) - } -} - -impl Display for ContractHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for ContractHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractHash { - fn from(bytes: [u8; 32]) -> Self { - ContractHash(bytes) - } -} - -impl Serialize for ContractHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractHash { - fn schema_name() -> String { - String::from("ContractHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("The hash address of the contract".to_string()); - schema_object.into() - } -} - -/// A newtype wrapping a `HashAddr` which references a [`ContractPackage`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractPackageHash(HashAddr); - -impl ContractPackageHash { - /// Constructs a new `ContractPackageHash` from the raw bytes of the contract package hash. - pub const fn new(value: HashAddr) -> ContractPackageHash { - ContractPackageHash(value) - } - - /// Returns the raw bytes of the contract hash as an array. 
- pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the contract hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `ContractPackageHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", PACKAGE_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `ContractPackageHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(PACKAGE_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - - let hex_addr = remainder - .strip_prefix(PACKAGE_STRING_LEGACY_EXTRA_PREFIX) - .unwrap_or(remainder); - - let bytes = HashAddr::try_from(checksummed_hex::decode(hex_addr)?.as_ref())?; - Ok(ContractPackageHash(bytes)) - } -} - -impl Display for ContractPackageHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for ContractPackageHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "ContractPackageHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for ContractPackageHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for ContractPackageHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for ContractPackageHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((ContractPackageHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for ContractPackageHash { - fn from(bytes: [u8; 32]) -> Self { - ContractPackageHash(bytes) - } -} - -impl Serialize for ContractPackageHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ContractPackageHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - ContractPackageHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(ContractPackageHash(bytes)) - } - } -} - -impl AsRef<[u8]> for ContractPackageHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for ContractPackageHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(ContractPackageHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -impl TryFrom<&Vec> for ContractPackageHash { - type Error = TryFromSliceForContractHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(ContractPackageHash::new) - .map_err(|_| TryFromSliceForContractHashError(())) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ContractPackageHash { - fn schema_name() -> String { - String::from("ContractPackageHash") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let 
mut schema_object = schema.into_object(); - schema_object.metadata().description = - Some("The hash address of the contract package".to_string()); - schema_object.into() - } -} - -/// A enum to determine the lock status of the contract package. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum ContractPackageStatus { - /// The package is locked and cannot be versioned. - Locked, - /// The package is unlocked and can be versioned. - Unlocked, -} - -impl ContractPackageStatus { - /// Create a new status flag based on a boolean value - pub fn new(is_locked: bool) -> Self { - if is_locked { - ContractPackageStatus::Locked - } else { - ContractPackageStatus::Unlocked - } - } -} - -impl Default for ContractPackageStatus { - fn default() -> Self { - Self::Unlocked - } -} - -impl ToBytes for ContractPackageStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - match self { - ContractPackageStatus::Unlocked => result.append(&mut false.to_bytes()?), - ContractPackageStatus::Locked => result.append(&mut true.to_bytes()?), - } - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - ContractPackageStatus::Unlocked => false.serialized_length(), - ContractPackageStatus::Locked => true.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ContractPackageStatus::Locked => writer.push(u8::from(true)), - ContractPackageStatus::Unlocked => writer.push(u8::from(false)), - } - Ok(()) - } -} - -impl FromBytes for ContractPackageStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (val, bytes) = bool::from_bytes(bytes)?; - let status = ContractPackageStatus::new(val); - Ok((status, bytes)) - } -} - -/// Contract definition, metadata, and security container. -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ContractPackage { - /// Key used to add or disable versions - access_key: URef, - /// All versions (enabled & disabled) - versions: ContractVersions, - /// Disabled versions - disabled_versions: DisabledVersions, - /// Mapping maintaining the set of URefs associated with each "user - /// group". This can be used to control access to methods in a particular - /// version of the contract. A method is callable by any context which - /// "knows" any of the URefs associated with the method's user group. - groups: Groups, - /// A flag that determines whether a contract is locked - lock_status: ContractPackageStatus, -} - -impl CLTyped for ContractPackage { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ContractPackage { - /// Create new `ContractPackage` (with no versions) from given access key. - pub fn new( - access_key: URef, - versions: ContractVersions, - disabled_versions: DisabledVersions, - groups: Groups, - lock_status: ContractPackageStatus, - ) -> Self { - ContractPackage { - access_key, - versions, - disabled_versions, - groups, - lock_status, - } - } - - /// Get the access key for this contract. - pub fn access_key(&self) -> URef { - self.access_key - } - - /// Get the group definitions for this contract. - pub fn groups(&self) -> &Groups { - &self.groups - } - - /// Returns reference to all of this contract's versions. 
- pub fn versions(&self) -> &ContractVersions { - &self.versions - } - - /// Returns mutable reference to all of this contract's versions (enabled and disabled). - pub fn versions_mut(&mut self) -> &mut ContractVersions { - &mut self.versions - } - - /// Consumes the object and returns all of this contract's versions (enabled and disabled). - pub fn take_versions(self) -> ContractVersions { - self.versions - } - - /// Returns all of this contract's disabled versions. - pub fn disabled_versions(&self) -> &DisabledVersions { - &self.disabled_versions - } - - /// Returns mut reference to all of this contract's disabled versions. - pub fn disabled_versions_mut(&mut self) -> &mut DisabledVersions { - &mut self.disabled_versions - } - - #[cfg(test)] - fn next_contract_version_for(&self, protocol_version: ProtocolVersionMajor) -> ContractVersion { - let current_version = self - .versions - .keys() - .rev() - .find_map(|&contract_version_key| { - if contract_version_key.protocol_version_major() == protocol_version { - Some(contract_version_key.contract_version()) - } else { - None - } - }) - .unwrap_or(0); - - current_version + 1 - } - - #[cfg(test)] - fn insert_contract_version( - &mut self, - protocol_version_major: ProtocolVersionMajor, - contract_hash: ContractHash, - ) -> ContractVersionKey { - let contract_version = self.next_contract_version_for(protocol_version_major); - let key = ContractVersionKey::new(protocol_version_major, contract_version); - self.versions.insert(key, contract_hash); - key - } - - #[cfg(test)] - fn groups_mut(&mut self) -> &mut Groups { - &mut self.groups - } -} - -impl ToBytes for ContractPackage { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.access_key().write_bytes(&mut result)?; - self.versions().write_bytes(&mut result)?; - self.disabled_versions().write_bytes(&mut result)?; - self.groups().write_bytes(&mut result)?; - self.lock_status.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.access_key.serialized_length() - + self.versions.serialized_length() - + self.disabled_versions.serialized_length() - + self.groups.serialized_length() - + self.lock_status.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.access_key().write_bytes(writer)?; - self.versions().write_bytes(writer)?; - self.disabled_versions().write_bytes(writer)?; - self.groups().write_bytes(writer)?; - self.lock_status.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ContractPackage { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (access_key, bytes) = URef::from_bytes(bytes)?; - let (versions, bytes) = ContractVersions::from_bytes(bytes)?; - let (disabled_versions, bytes) = DisabledVersions::from_bytes(bytes)?; - let (groups, bytes) = Groups::from_bytes(bytes)?; - let (lock_status, bytes) = ContractPackageStatus::from_bytes(bytes)?; - let result = ContractPackage { - access_key, - versions, - disabled_versions, - groups, - lock_status, - }; - - Ok((result, bytes)) - } -} - -impl From for Package { - fn from(value: ContractPackage) -> Self { - let versions: BTreeMap = value - .versions - .into_iter() - .map(|(version, contract_hash)| { - let entity_version = EntityVersionKey::new(2, version.contract_version()); - let entity_hash: AddressableEntityHash = - AddressableEntityHash::new(contract_hash.value()); - (entity_version, entity_hash) - }) - .collect(); - - let disabled_versions = value 
- .disabled_versions - .into_iter() - .map(|contract_versions| { - EntityVersionKey::new( - contract_versions.protocol_version_major(), - contract_versions.contract_version(), - ) - }) - .collect(); - - let lock_status = if value.lock_status == ContractPackageStatus::Locked { - PackageStatus::Locked - } else { - PackageStatus::Unlocked - }; - - Package::new( - value.access_key, - versions.into(), - disabled_versions, - value.groups, - lock_status, - PackageKind::SmartContract, - ) - } -} - -/// Methods and type signatures supported by a contract. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Contract { - contract_package_hash: ContractPackageHash, - contract_wasm_hash: ContractWasmHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, -} - -impl From - for ( - ContractPackageHash, - ContractWasmHash, - NamedKeys, - EntryPoints, - ProtocolVersion, - ) -{ - fn from(contract: Contract) -> Self { - ( - contract.contract_package_hash, - contract.contract_wasm_hash, - contract.named_keys, - contract.entry_points, - contract.protocol_version, - ) - } -} - -impl Contract { - /// `Contract` constructor. - pub fn new( - contract_package_hash: ContractPackageHash, - contract_wasm_hash: ContractWasmHash, - named_keys: NamedKeys, - entry_points: EntryPoints, - protocol_version: ProtocolVersion, - ) -> Self { - Contract { - contract_package_hash, - contract_wasm_hash, - named_keys, - entry_points, - protocol_version, - } - } - - /// Hash for accessing contract package - pub fn contract_package_hash(&self) -> ContractPackageHash { - self.contract_package_hash - } - - /// Hash for accessing contract WASM - pub fn contract_wasm_hash(&self) -> ContractWasmHash { - self.contract_wasm_hash - } - - /// Checks whether there is a method with the given name - pub fn has_entry_point(&self, name: &str) -> bool { - self.entry_points.has_entry_point(name) - } - - /// Returns the type signature for the given `method`. - pub fn entry_point(&self, method: &str) -> Option<&EntryPoint> { - self.entry_points.get(method) - } - - /// Get the protocol version this header is targeting. - pub fn protocol_version(&self) -> ProtocolVersion { - self.protocol_version - } - - /// Adds new entry point - pub fn add_entry_point>(&mut self, entry_point: EntryPoint) { - self.entry_points.add_entry_point(entry_point); - } - - /// Hash for accessing contract bytes - pub fn contract_wasm_key(&self) -> Key { - self.contract_wasm_hash.into() - } - - /// Returns immutable reference to methods - pub fn entry_points(&self) -> &EntryPoints { - &self.entry_points - } - - /// Takes `named_keys` - pub fn take_named_keys(self) -> NamedKeys { - self.named_keys - } - - /// Returns a reference to `named_keys` - pub fn named_keys(&self) -> &NamedKeys { - &self.named_keys - } - - /// Appends `keys` to `named_keys` - pub fn named_keys_append(&mut self, keys: NamedKeys) { - self.named_keys.append(keys); - } - - /// Removes given named key. - pub fn remove_named_key(&mut self, key: &str) -> Option { - self.named_keys.remove(key) - } - - /// Set protocol_version. - pub fn set_protocol_version(&mut self, protocol_version: ProtocolVersion) { - self.protocol_version = protocol_version; - } - - /// Determines if `Contract` is compatible with a given `ProtocolVersion`. 
- pub fn is_compatible_protocol_version(&self, protocol_version: ProtocolVersion) -> bool { - self.protocol_version.value().major == protocol_version.value().major - } -} - -impl ToBytes for Contract { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.contract_package_hash().write_bytes(&mut result)?; - self.contract_wasm_hash().write_bytes(&mut result)?; - self.named_keys().write_bytes(&mut result)?; - self.entry_points().write_bytes(&mut result)?; - self.protocol_version().write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - ToBytes::serialized_length(&self.entry_points) - + ToBytes::serialized_length(&self.contract_package_hash) - + ToBytes::serialized_length(&self.contract_wasm_hash) - + ToBytes::serialized_length(&self.protocol_version) - + ToBytes::serialized_length(&self.named_keys) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.contract_package_hash().write_bytes(writer)?; - self.contract_wasm_hash().write_bytes(writer)?; - self.named_keys().write_bytes(writer)?; - self.entry_points().write_bytes(writer)?; - self.protocol_version().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Contract { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (contract_package_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (contract_wasm_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (named_keys, bytes) = NamedKeys::from_bytes(bytes)?; - let (entry_points, bytes) = EntryPoints::from_bytes(bytes)?; - let (protocol_version, bytes) = ProtocolVersion::from_bytes(bytes)?; - Ok(( - Contract { - contract_package_hash, - contract_wasm_hash, - named_keys, - entry_points, - protocol_version, - }, - bytes, - )) - } -} - -impl Default for Contract { - fn default() -> Self { - Contract { - named_keys: NamedKeys::default(), - entry_points: EntryPoints::default(), - contract_wasm_hash: [0; KEY_HASH_LENGTH].into(), - contract_package_hash: [0; KEY_HASH_LENGTH].into(), - protocol_version: ProtocolVersion::V1_0_0, - } - } -} - -/// Default name for an entry point -pub const DEFAULT_ENTRY_POINT_NAME: &str = "call"; - -/// Default name for an installer entry point -pub const ENTRY_POINT_NAME_INSTALL: &str = "install"; - -/// Default name for an upgrade entry point -pub const UPGRADE_ENTRY_POINT_NAME: &str = "upgrade"; - -#[cfg(test)] -mod tests { - - use super::*; - use crate::{AccessRights, EntryPointAccess, EntryPointType, Group, Parameter, URef}; - use alloc::borrow::ToOwned; - - const CONTRACT_HASH_V1: ContractHash = ContractHash::new([42; 32]); - const CONTRACT_HASH_V2: ContractHash = ContractHash::new([84; 32]); - - fn make_contract_package() -> ContractPackage { - let mut contract_package = ContractPackage::new( - URef::new([0; 32], AccessRights::NONE), - ContractVersions::default(), - DisabledVersions::default(), - Groups::default(), - ContractPackageStatus::default(), - ); - - // add groups - { - let group_urefs = { - let mut ret = BTreeSet::new(); - ret.insert(URef::new([1; 32], AccessRights::READ)); - ret - }; - - contract_package - .groups_mut() - .insert(Group::new("Group 1"), group_urefs.clone()); - - contract_package - .groups_mut() - .insert(Group::new("Group 2"), group_urefs); - } - - // add entry_points - let _entry_points = { - let mut ret = BTreeMap::new(); - let entrypoint = EntryPoint::new( - "method0".to_string(), - vec![], - CLType::U32, - EntryPointAccess::groups(&["Group 2"]), - EntryPointType::Session, - 
); - ret.insert(entrypoint.name().to_owned(), entrypoint); - let entrypoint = EntryPoint::new( - "method1".to_string(), - vec![Parameter::new("Foo", CLType::U32)], - CLType::U32, - EntryPointAccess::groups(&["Group 1"]), - EntryPointType::Session, - ); - ret.insert(entrypoint.name().to_owned(), entrypoint); - ret - }; - - let _contract_package_hash = [41; 32]; - let _contract_wasm_hash = [43; 32]; - let _named_keys = NamedKeys::new(); - let protocol_version = ProtocolVersion::V1_0_0; - - let v1 = contract_package - .insert_contract_version(protocol_version.value().major, CONTRACT_HASH_V1); - let v2 = contract_package - .insert_contract_version(protocol_version.value().major, CONTRACT_HASH_V2); - - assert!(v2 > v1); - - contract_package - } - - #[test] - fn roundtrip_serialization() { - let contract_package = make_contract_package(); - let bytes = contract_package.to_bytes().expect("should serialize"); - let (decoded_package, rem) = - ContractPackage::from_bytes(&bytes).expect("should deserialize"); - assert_eq!(contract_package, decoded_package); - assert_eq!(rem.len(), 0); - } - - #[test] - fn contract_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = HashAddr::try_from(&bytes[..]).expect("should create contract hash"); - let contract_hash = ContractHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_package_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let contract_hash = HashAddr::try_from(&bytes[..]).expect("should create contract hash"); - let contract_hash = ContractPackageHash::new(contract_hash); - assert_eq!(&bytes, &contract_hash.as_bytes()); - } - - #[test] - fn contract_hash_from_str() { - let contract_hash = ContractHash([3; 32]); - let encoded = contract_hash.to_formatted_string(); - let decoded = ContractHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_hash, decoded); - - let invalid_prefix = - "contract--0000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "contract-00000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(short_addr).is_err()); - - let long_addr = - "contract-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(ContractHash::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "contract-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(ContractHash::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn contract_package_hash_from_str() { - let contract_package_hash = ContractPackageHash([3; 32]); - let encoded = contract_package_hash.to_formatted_string(); - let decoded = ContractPackageHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(contract_package_hash, decoded); - - let invalid_prefix = - "contract-package0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - 
ContractPackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } - - #[test] - fn contract_package_hash_from_legacy_str() { - let contract_package_hash = ContractPackageHash([3; 32]); - let hex_addr = contract_package_hash.to_string(); - let legacy_encoded = format!("contract-package-wasm{}", hex_addr); - let decoded_from_legacy = ContractPackageHash::from_formatted_str(&legacy_encoded) - .expect("should accept legacy prefixed string"); - assert_eq!( - contract_package_hash, decoded_from_legacy, - "decoded_from_legacy should equal decoded" - ); - - let invalid_prefix = - "contract-packagewasm0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-wasm00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - ContractPackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - ContractPackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } - - #[test] - fn contract_hash_serde_roundtrip() { - let contract_hash = ContractHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_hash_json_roundtrip() { - let contract_hash = ContractHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } - - #[test] - fn contract_package_hash_serde_roundtrip() { - let contract_hash = ContractPackageHash([255; 32]); - let serialized = bincode::serialize(&contract_hash).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(contract_hash, deserialized) - } - - #[test] - fn contract_package_hash_json_roundtrip() { - let contract_hash = ContractPackageHash([255; 32]); - let json_string = serde_json::to_string_pretty(&contract_hash).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(contract_hash, decoded) - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #![proptest_config(ProptestConfig { - cases: 1024, - .. 
ProptestConfig::default() - })] - - #[test] - fn test_value_contract(contract in gens::contract_arb()) { - bytesrepr::test_serialization_roundtrip(&contract); - } - - #[test] - fn test_value_contract_package(contract_pkg in gens::contract_package_arb()) { - bytesrepr::test_serialization_roundtrip(&contract_pkg); - } - } -} diff --git a/casper_types_ver_2_0/src/crypto.rs b/casper_types_ver_2_0/src/crypto.rs deleted file mode 100644 index fbcd172c..00000000 --- a/casper_types_ver_2_0/src/crypto.rs +++ /dev/null @@ -1,35 +0,0 @@ -//! Cryptographic types and operations on them - -mod asymmetric_key; -mod error; - -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; - -use crate::key::BLAKE2B_DIGEST_LENGTH; -#[cfg(any(feature = "std", test))] -pub use asymmetric_key::generate_ed25519_keypair; -#[cfg(any(feature = "testing", feature = "gens", test))] -pub use asymmetric_key::gens; -pub use asymmetric_key::{ - sign, verify, AsymmetricType, PublicKey, SecretKey, Signature, ED25519_TAG, SECP256K1_TAG, - SYSTEM_ACCOUNT, SYSTEM_TAG, -}; -pub use error::Error; -#[cfg(any(feature = "std", test))] -pub use error::ErrorExt; - -#[doc(hidden)] -pub fn blake2b>(data: T) -> [u8; BLAKE2B_DIGEST_LENGTH] { - let mut result = [0; BLAKE2B_DIGEST_LENGTH]; - // NOTE: Assumed safe as `BLAKE2B_DIGEST_LENGTH` is a valid value for a hasher - let mut hasher = VarBlake2b::new(BLAKE2B_DIGEST_LENGTH).expect("should create hasher"); - - hasher.update(data); - hasher.finalize_variable(|slice| { - result.copy_from_slice(slice); - }); - result -} diff --git a/casper_types_ver_2_0/src/crypto/asymmetric_key.rs b/casper_types_ver_2_0/src/crypto/asymmetric_key.rs deleted file mode 100644 index 1f445b78..00000000 --- a/casper_types_ver_2_0/src/crypto/asymmetric_key.rs +++ /dev/null @@ -1,1304 +0,0 @@ -//! 
Asymmetric key types and methods on them - -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - cmp::Ordering, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - hash::{Hash, Hasher}, - iter, - marker::Copy, -}; -#[cfg(any(feature = "std", test))] -use std::path::Path; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use derp::{Der, Tag}; -use ed25519_dalek::{ - Signature as Ed25519Signature, SigningKey as Ed25519SecretKey, - VerifyingKey as Ed25519PublicKey, PUBLIC_KEY_LENGTH as ED25519_PUBLIC_KEY_LENGTH, - SECRET_KEY_LENGTH as ED25519_SECRET_KEY_LENGTH, SIGNATURE_LENGTH as ED25519_SIGNATURE_LENGTH, -}; -use hex_fmt::HexFmt; -use k256::ecdsa::{ - signature::{Signer, Verifier}, - Signature as Secp256k1Signature, SigningKey as Secp256k1SecretKey, - VerifyingKey as Secp256k1PublicKey, -}; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(feature = "std", test))] -use pem::Pem; -#[cfg(any(feature = "testing", test))] -use rand::{Rng, RngCore}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -#[cfg(feature = "json-schema")] -use serde_json::json; -#[cfg(any(feature = "std", test))] -use untrusted::Input; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - account::AccountHash, - bytesrepr, - bytesrepr::{FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - checksummed_hex, - crypto::Error, - CLType, CLTyped, Tagged, -}; -#[cfg(any(feature = "std", test))] -use crate::{ - crypto::ErrorExt, - file_utils::{read_file, write_file, write_private_file}, -}; - -#[cfg(any(feature = "testing", test))] -pub mod gens; -#[cfg(test)] -mod tests; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; - -/// Tag for system variant. -pub const SYSTEM_TAG: u8 = 0; -const SYSTEM: &str = "System"; - -/// Tag for ed25519 variant. -pub const ED25519_TAG: u8 = 1; -const ED25519: &str = "Ed25519"; - -/// Tag for secp256k1 variant. -pub const SECP256K1_TAG: u8 = 2; -const SECP256K1: &str = "Secp256k1"; - -const SECP256K1_SECRET_KEY_LENGTH: usize = 32; -const SECP256K1_COMPRESSED_PUBLIC_KEY_LENGTH: usize = 33; -const SECP256K1_SIGNATURE_LENGTH: usize = 64; - -/// Public key for system account. -pub const SYSTEM_ACCOUNT: PublicKey = PublicKey::System; - -// See https://www.secg.org/sec1-v2.pdf#subsection.C.4 -#[cfg(any(feature = "std", test))] -const EC_PUBLIC_KEY_OBJECT_IDENTIFIER: [u8; 7] = [42, 134, 72, 206, 61, 2, 1]; - -// See https://tools.ietf.org/html/rfc8410#section-10.3 -#[cfg(any(feature = "std", test))] -const ED25519_OBJECT_IDENTIFIER: [u8; 3] = [43, 101, 112]; -#[cfg(any(feature = "std", test))] -const ED25519_PEM_SECRET_KEY_TAG: &str = "PRIVATE KEY"; -#[cfg(any(feature = "std", test))] -const ED25519_PEM_PUBLIC_KEY_TAG: &str = "PUBLIC KEY"; - -// Ref? 
-#[cfg(any(feature = "std", test))] -const SECP256K1_OBJECT_IDENTIFIER: [u8; 5] = [43, 129, 4, 0, 10]; -#[cfg(any(feature = "std", test))] -const SECP256K1_PEM_SECRET_KEY_TAG: &str = "EC PRIVATE KEY"; -#[cfg(any(feature = "std", test))] -const SECP256K1_PEM_PUBLIC_KEY_TAG: &str = "PUBLIC KEY"; - -#[cfg(feature = "json-schema")] -static ED25519_SECRET_KEY: Lazy = Lazy::new(|| { - let bytes = [15u8; SecretKey::ED25519_LENGTH]; - SecretKey::ed25519_from_bytes(bytes).unwrap() -}); - -#[cfg(feature = "json-schema")] -static ED25519_PUBLIC_KEY: Lazy = Lazy::new(|| { - let bytes = [15u8; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - PublicKey::from(&secret_key) -}); - -/// Operations on asymmetric cryptographic type. -pub trait AsymmetricType<'a> -where - Self: 'a + Sized + Tagged, - Vec: From<&'a Self>, -{ - /// Converts `self` to hex, where the first byte represents the algorithm tag. - fn to_hex(&'a self) -> String { - let bytes = iter::once(self.tag()) - .chain(Vec::::from(self)) - .collect::>(); - base16::encode_lower(&bytes) - } - - /// Tries to decode `Self` from its hex-representation. The hex format should be as produced - /// by `AsymmetricType::to_hex()`. - fn from_hex>(input: A) -> Result { - if input.as_ref().len() < 2 { - return Err(Error::AsymmetricKey( - "failed to decode from hex: too short".to_string(), - )); - } - - let (tag_hex, key_hex) = input.as_ref().split_at(2); - - let tag = checksummed_hex::decode(tag_hex)?; - let key_bytes = checksummed_hex::decode(key_hex)?; - - match tag[0] { - SYSTEM_TAG => { - if key_bytes.is_empty() { - Ok(Self::system()) - } else { - Err(Error::AsymmetricKey( - "failed to decode from hex: invalid system variant".to_string(), - )) - } - } - ED25519_TAG => Self::ed25519_from_bytes(&key_bytes), - SECP256K1_TAG => Self::secp256k1_from_bytes(&key_bytes), - _ => Err(Error::AsymmetricKey(format!( - "failed to decode from hex: invalid tag. Expected {}, {} or {}, got {}", - SYSTEM_TAG, ED25519_TAG, SECP256K1_TAG, tag[0] - ))), - } - } - - /// Constructs a new system variant. - fn system() -> Self; - - /// Constructs a new ed25519 variant from a byte slice. - fn ed25519_from_bytes>(bytes: T) -> Result; - - /// Constructs a new secp256k1 variant from a byte slice. - fn secp256k1_from_bytes>(bytes: T) -> Result; -} - -/// A secret or private asymmetric key. -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum SecretKey { - /// System secret key. - System, - /// Ed25519 secret key. - #[cfg_attr(feature = "datasize", data_size(skip))] - // Manually verified to have no data on the heap. - Ed25519(Ed25519SecretKey), - /// secp256k1 secret key. - #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1SecretKey), -} - -impl SecretKey { - /// The length in bytes of a system secret key. - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 secret key. - pub const ED25519_LENGTH: usize = ED25519_SECRET_KEY_LENGTH; - - /// The length in bytes of a secp256k1 secret key. - pub const SECP256K1_LENGTH: usize = SECP256K1_SECRET_KEY_LENGTH; - - /// Constructs a new system variant. - pub fn system() -> Self { - SecretKey::System - } - - /// Constructs a new ed25519 variant from a byte slice. - pub fn ed25519_from_bytes>(bytes: T) -> Result { - Ok(SecretKey::Ed25519(Ed25519SecretKey::try_from( - bytes.as_ref(), - )?)) - } - - /// Constructs a new secp256k1 variant from a byte slice. 
- pub fn secp256k1_from_bytes>(bytes: T) -> Result { - Ok(SecretKey::Secp256k1( - Secp256k1SecretKey::from_slice(bytes.as_ref()).map_err(|_| Error::SignatureError)?, - )) - } - - /// Generates a new ed25519 variant using the system's secure random number generator. - #[cfg(any(feature = "std", test))] - pub fn generate_ed25519() -> Result { - let mut bytes = [0u8; Self::ED25519_LENGTH]; - getrandom::getrandom(&mut bytes[..])?; - SecretKey::ed25519_from_bytes(bytes).map_err(Into::into) - } - - /// Generates a new secp256k1 variant using the system's secure random number generator. - #[cfg(any(feature = "std", test))] - pub fn generate_secp256k1() -> Result { - let mut bytes = [0u8; Self::SECP256K1_LENGTH]; - getrandom::getrandom(&mut bytes[..])?; - SecretKey::secp256k1_from_bytes(bytes).map_err(Into::into) - } - - /// Attempts to write the key bytes to the configured file path. - #[cfg(any(feature = "std", test))] - pub fn to_file>(&self, file: P) -> Result<(), ErrorExt> { - write_private_file(file, self.to_pem()?).map_err(ErrorExt::SecretKeySave) - } - - /// Attempts to read the key bytes from configured file path. - #[cfg(any(feature = "std", test))] - pub fn from_file>(file: P) -> Result { - let data = read_file(file).map_err(ErrorExt::SecretKeyLoad)?; - Self::from_pem(data) - } - - /// DER encodes a key. - #[cfg(any(feature = "std", test))] - pub fn to_der(&self) -> Result, ErrorExt> { - match self { - SecretKey::System => Err(Error::System(String::from("to_der")).into()), - SecretKey::Ed25519(secret_key) => { - // See https://tools.ietf.org/html/rfc8410#section-10.3 - let mut key_bytes = vec![]; - let mut der = Der::new(&mut key_bytes); - der.octet_string(&secret_key.to_bytes())?; - - let mut encoded = vec![]; - der = Der::new(&mut encoded); - der.sequence(|der| { - der.integer(&[0])?; - der.sequence(|der| der.oid(&ED25519_OBJECT_IDENTIFIER))?; - der.octet_string(&key_bytes) - })?; - Ok(encoded) - } - SecretKey::Secp256k1(secret_key) => { - // See https://www.secg.org/sec1-v2.pdf#subsection.C.4 - let mut oid_bytes = vec![]; - let mut der = Der::new(&mut oid_bytes); - der.oid(&SECP256K1_OBJECT_IDENTIFIER)?; - - let mut encoded = vec![]; - der = Der::new(&mut encoded); - der.sequence(|der| { - der.integer(&[1])?; - der.octet_string(secret_key.to_bytes().as_slice())?; - der.element(Tag::ContextSpecificConstructed0, &oid_bytes) - })?; - Ok(encoded) - } - } - } - - /// Decodes a key from a DER-encoded slice. - #[cfg(any(feature = "std", test))] - pub fn from_der>(input: T) -> Result { - let input = Input::from(input.as_ref()); - - let (key_type_tag, raw_bytes) = input.read_all(derp::Error::Read, |input| { - derp::nested(input, Tag::Sequence, |input| { - // Safe to ignore the first value which should be an integer. - let version_slice = - derp::expect_tag_and_get_value(input, Tag::Integer)?.as_slice_less_safe(); - if version_slice.len() != 1 { - return Err(derp::Error::NonZeroUnusedBits); - } - let version = version_slice[0]; - - // Read the next value. - let (tag, value) = derp::read_tag_and_get_value(input)?; - if tag == Tag::Sequence as u8 { - // Expecting an Ed25519 key. - if version != 0 { - return Err(derp::Error::WrongValue); - } - - // The sequence should have one element: an object identifier defining Ed25519. 
- let object_identifier = value.read_all(derp::Error::Read, |input| { - derp::expect_tag_and_get_value(input, Tag::Oid) - })?; - if object_identifier.as_slice_less_safe() != ED25519_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - // The third and final value should be the raw bytes of the secret key as an - // octet string in an octet string. - let raw_bytes = derp::nested(input, Tag::OctetString, |input| { - derp::expect_tag_and_get_value(input, Tag::OctetString) - })? - .as_slice_less_safe(); - - return Ok((ED25519_TAG, raw_bytes)); - } else if tag == Tag::OctetString as u8 { - // Expecting a secp256k1 key. - if version != 1 { - return Err(derp::Error::WrongValue); - } - - // The octet string is the secret key. - let raw_bytes = value.as_slice_less_safe(); - - // The object identifier is next. - let parameter0 = - derp::expect_tag_and_get_value(input, Tag::ContextSpecificConstructed0)?; - let object_identifier = parameter0.read_all(derp::Error::Read, |input| { - derp::expect_tag_and_get_value(input, Tag::Oid) - })?; - if object_identifier.as_slice_less_safe() != SECP256K1_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - // There might be an optional public key as the final value, but we're not - // interested in parsing that. Read it to ensure `input.read_all` doesn't fail - // with unused bytes error. - let _ = derp::read_tag_and_get_value(input); - - return Ok((SECP256K1_TAG, raw_bytes)); - } - - Err(derp::Error::WrongValue) - }) - })?; - - match key_type_tag { - SYSTEM_TAG => Err(Error::AsymmetricKey("cannot construct variant".to_string()).into()), - ED25519_TAG => SecretKey::ed25519_from_bytes(raw_bytes).map_err(Into::into), - SECP256K1_TAG => SecretKey::secp256k1_from_bytes(raw_bytes).map_err(Into::into), - _ => Err(Error::AsymmetricKey("unknown type tag".to_string()).into()), - } - } - - /// PEM encodes a key. - #[cfg(any(feature = "std", test))] - pub fn to_pem(&self) -> Result { - let tag = match self { - SecretKey::System => return Err(Error::System(String::from("to_pem")).into()), - SecretKey::Ed25519(_) => ED25519_PEM_SECRET_KEY_TAG.to_string(), - SecretKey::Secp256k1(_) => SECP256K1_PEM_SECRET_KEY_TAG.to_string(), - }; - let contents = self.to_der()?; - let pem = Pem { tag, contents }; - Ok(pem::encode(&pem)) - } - - /// Decodes a key from a PEM-encoded slice. - #[cfg(any(feature = "std", test))] - pub fn from_pem>(input: T) -> Result { - let pem = pem::parse(input)?; - - let secret_key = Self::from_der(&pem.contents)?; - - let bad_tag = |expected_tag: &str| { - ErrorExt::FromPem(format!( - "invalid tag: expected {}, got {}", - expected_tag, pem.tag - )) - }; - - match secret_key { - SecretKey::System => return Err(Error::System(String::from("from_pem")).into()), - SecretKey::Ed25519(_) => { - if pem.tag != ED25519_PEM_SECRET_KEY_TAG { - return Err(bad_tag(ED25519_PEM_SECRET_KEY_TAG)); - } - } - SecretKey::Secp256k1(_) => { - if pem.tag != SECP256K1_PEM_SECRET_KEY_TAG { - return Err(bad_tag(SECP256K1_PEM_SECRET_KEY_TAG)); - } - } - } - - Ok(secret_key) - } - - /// Returns a random `SecretKey`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - Self::random_ed25519(rng) - } else { - Self::random_secp256k1(rng) - } - } - - /// Returns a random Ed25519 variant of `SecretKey`. 
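// Illustrative sketch (not from the diff; `casper_types` path assumed): a PEM
// round-trip for a secret key via the `to_pem`/`from_pem` pair above, both gated
// behind the "std" feature.
use casper_types::{PublicKey, SecretKey};

fn pem_round_trip_sketch() {
    let secret_key = SecretKey::generate_ed25519().expect("OS RNG should be available");
    let pem = secret_key.to_pem().expect("Ed25519 keys are PEM-encodable");

    // `from_pem` re-derives the variant from the DER payload and checks the PEM tag.
    let decoded = SecretKey::from_pem(pem.as_bytes()).expect("should decode");

    // `SecretKey` has no unconditional `PartialEq`, so compare derived public keys,
    // exactly as the deleted tests further below do.
    assert_eq!(PublicKey::from(&secret_key), PublicKey::from(&decoded));
}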
- #[cfg(any(feature = "testing", test))] - pub fn random_ed25519(rng: &mut TestRng) -> Self { - let mut bytes = [0u8; Self::ED25519_LENGTH]; - rng.fill_bytes(&mut bytes[..]); - SecretKey::ed25519_from_bytes(bytes).unwrap() - } - - /// Returns a random secp256k1 variant of `SecretKey`. - #[cfg(any(feature = "testing", test))] - pub fn random_secp256k1(rng: &mut TestRng) -> Self { - let mut bytes = [0u8; Self::SECP256K1_LENGTH]; - rng.fill_bytes(&mut bytes[..]); - SecretKey::secp256k1_from_bytes(bytes).unwrap() - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ED25519_SECRET_KEY - } - - fn variant_name(&self) -> &str { - match self { - SecretKey::System => SYSTEM, - SecretKey::Ed25519(_) => ED25519, - SecretKey::Secp256k1(_) => SECP256K1, - } - } -} - -impl Debug for SecretKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!(formatter, "SecretKey::{}", self.variant_name()) - } -} - -impl Display for SecretKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - ::fmt(self, formatter) - } -} - -impl Tagged for SecretKey { - fn tag(&self) -> u8 { - match self { - SecretKey::System => SYSTEM_TAG, - SecretKey::Ed25519(_) => ED25519_TAG, - SecretKey::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -/// A public asymmetric key. -#[derive(Clone, Eq, PartialEq)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum PublicKey { - /// System public key. - System, - /// Ed25519 public key. - #[cfg_attr(feature = "datasize", data_size(skip))] - Ed25519(Ed25519PublicKey), - /// secp256k1 public key. - #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1PublicKey), -} - -impl PublicKey { - /// The length in bytes of a system public key. - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 public key. - pub const ED25519_LENGTH: usize = ED25519_PUBLIC_KEY_LENGTH; - - /// The length in bytes of a secp256k1 public key. - pub const SECP256K1_LENGTH: usize = SECP256K1_COMPRESSED_PUBLIC_KEY_LENGTH; - - /// Creates an `AccountHash` from a given `PublicKey` instance. - pub fn to_account_hash(&self) -> AccountHash { - AccountHash::from(self) - } - - /// Returns `true` if this public key is of the `System` variant. - pub fn is_system(&self) -> bool { - matches!(self, PublicKey::System) - } - - /// Attempts to write the key bytes to the configured file path. - #[cfg(any(feature = "std", test))] - pub fn to_file>(&self, file: P) -> Result<(), ErrorExt> { - write_file(file, self.to_pem()?).map_err(ErrorExt::PublicKeySave) - } - - /// Attempts to read the key bytes from configured file path. - #[cfg(any(feature = "std", test))] - pub fn from_file>(file: P) -> Result { - let data = read_file(file).map_err(ErrorExt::PublicKeyLoad)?; - Self::from_pem(data) - } - - /// DER encodes a key. 
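// Illustrative sketch (not from the diff; paths assumed): deriving the on-chain
// `AccountHash` for a public key via `to_account_hash`, which is shorthand for
// `AccountHash::from(&public_key)`.
use casper_types::{account::AccountHash, PublicKey, SecretKey};

fn account_hash_sketch() {
    let secret_key = SecretKey::generate_secp256k1().expect("OS RNG should be available");
    let public_key = PublicKey::from(&secret_key);

    let account_hash: AccountHash = public_key.to_account_hash();
    // The account hash is derived from, not equal to, the raw key bytes; the deleted
    // `public_key_to_account_hash` tests further below assert exactly this.
    assert_ne!(account_hash.as_ref(), Vec::<u8>::from(public_key));
}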
- #[cfg(any(feature = "std", test))] - pub fn to_der(&self) -> Result, ErrorExt> { - match self { - PublicKey::System => Err(Error::System(String::from("to_der")).into()), - PublicKey::Ed25519(public_key) => { - // See https://tools.ietf.org/html/rfc8410#section-10.1 - let mut encoded = vec![]; - let mut der = Der::new(&mut encoded); - der.sequence(|der| { - der.sequence(|der| der.oid(&ED25519_OBJECT_IDENTIFIER))?; - der.bit_string(0, public_key.as_ref()) - })?; - Ok(encoded) - } - PublicKey::Secp256k1(public_key) => { - // See https://www.secg.org/sec1-v2.pdf#subsection.C.3 - let mut encoded = vec![]; - let mut der = Der::new(&mut encoded); - der.sequence(|der| { - der.sequence(|der| { - der.oid(&EC_PUBLIC_KEY_OBJECT_IDENTIFIER)?; - der.oid(&SECP256K1_OBJECT_IDENTIFIER) - })?; - der.bit_string(0, public_key.to_encoded_point(true).as_ref()) - })?; - Ok(encoded) - } - } - } - - /// Decodes a key from a DER-encoded slice. - #[cfg(any(feature = "std", test))] - pub fn from_der>(input: T) -> Result { - let input = Input::from(input.as_ref()); - - let mut key_type_tag = ED25519_TAG; - let raw_bytes = input.read_all(derp::Error::Read, |input| { - derp::nested(input, Tag::Sequence, |input| { - derp::nested(input, Tag::Sequence, |input| { - // Read the first value. - let object_identifier = - derp::expect_tag_and_get_value(input, Tag::Oid)?.as_slice_less_safe(); - if object_identifier == ED25519_OBJECT_IDENTIFIER { - key_type_tag = ED25519_TAG; - Ok(()) - } else if object_identifier == EC_PUBLIC_KEY_OBJECT_IDENTIFIER { - // Assert the next object identifier is the secp256k1 ID. - let next_object_identifier = - derp::expect_tag_and_get_value(input, Tag::Oid)?.as_slice_less_safe(); - if next_object_identifier != SECP256K1_OBJECT_IDENTIFIER { - return Err(derp::Error::WrongValue); - } - - key_type_tag = SECP256K1_TAG; - Ok(()) - } else { - Err(derp::Error::WrongValue) - } - })?; - Ok(derp::bit_string_with_no_unused_bits(input)?.as_slice_less_safe()) - }) - })?; - - match key_type_tag { - ED25519_TAG => PublicKey::ed25519_from_bytes(raw_bytes).map_err(Into::into), - SECP256K1_TAG => PublicKey::secp256k1_from_bytes(raw_bytes).map_err(Into::into), - _ => unreachable!(), - } - } - - /// PEM encodes a key. - #[cfg(any(feature = "std", test))] - pub fn to_pem(&self) -> Result { - let tag = match self { - PublicKey::System => return Err(Error::System(String::from("to_pem")).into()), - PublicKey::Ed25519(_) => ED25519_PEM_PUBLIC_KEY_TAG.to_string(), - PublicKey::Secp256k1(_) => SECP256K1_PEM_PUBLIC_KEY_TAG.to_string(), - }; - let contents = self.to_der()?; - let pem = Pem { tag, contents }; - Ok(pem::encode(&pem)) - } - - /// Decodes a key from a PEM-encoded slice. - #[cfg(any(feature = "std", test))] - pub fn from_pem>(input: T) -> Result { - let pem = pem::parse(input)?; - let public_key = Self::from_der(&pem.contents)?; - let bad_tag = |expected_tag: &str| { - ErrorExt::FromPem(format!( - "invalid tag: expected {}, got {}", - expected_tag, pem.tag - )) - }; - match public_key { - PublicKey::System => return Err(Error::System(String::from("from_pem")).into()), - PublicKey::Ed25519(_) => { - if pem.tag != ED25519_PEM_PUBLIC_KEY_TAG { - return Err(bad_tag(ED25519_PEM_PUBLIC_KEY_TAG)); - } - } - PublicKey::Secp256k1(_) => { - if pem.tag != SECP256K1_PEM_PUBLIC_KEY_TAG { - return Err(bad_tag(SECP256K1_PEM_PUBLIC_KEY_TAG)); - } - } - } - Ok(public_key) - } - - /// Returns a random `PublicKey`. 
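// Illustrative sketch (not from the diff; paths assumed): a DER round-trip for a
// public key. `to_der` writes a standard SubjectPublicKeyInfo structure (RFC 8410 for
// Ed25519, SEC1 for secp256k1), which is why the deleted tests below can also parse
// the output with OpenSSL.
use casper_types::{PublicKey, SecretKey};

fn der_round_trip_sketch() {
    let secret_key = SecretKey::generate_ed25519().expect("OS RNG should be available");
    let public_key = PublicKey::from(&secret_key);

    let der = public_key.to_der().expect("non-system keys are DER-encodable");
    let decoded = PublicKey::from_der(&der).expect("should decode");
    assert_eq!(public_key, decoded);
}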
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random(rng); - PublicKey::from(&secret_key) - } - - /// Returns a random Ed25519 variant of `PublicKey`. - #[cfg(any(feature = "testing", test))] - pub fn random_ed25519(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random_ed25519(rng); - PublicKey::from(&secret_key) - } - - /// Returns a random secp256k1 variant of `PublicKey`. - #[cfg(any(feature = "testing", test))] - pub fn random_secp256k1(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random_secp256k1(rng); - PublicKey::from(&secret_key) - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &ED25519_PUBLIC_KEY - } - - fn variant_name(&self) -> &str { - match self { - PublicKey::System => SYSTEM, - PublicKey::Ed25519(_) => ED25519, - PublicKey::Secp256k1(_) => SECP256K1, - } - } -} - -impl AsymmetricType<'_> for PublicKey { - fn system() -> Self { - PublicKey::System - } - - fn ed25519_from_bytes>(bytes: T) -> Result { - Ok(PublicKey::Ed25519(Ed25519PublicKey::try_from( - bytes.as_ref(), - )?)) - } - - fn secp256k1_from_bytes>(bytes: T) -> Result { - Ok(PublicKey::Secp256k1( - Secp256k1PublicKey::from_sec1_bytes(bytes.as_ref()) - .map_err(|_| Error::SignatureError)?, - )) - } -} - -impl From<&SecretKey> for PublicKey { - fn from(secret_key: &SecretKey) -> PublicKey { - match secret_key { - SecretKey::System => PublicKey::System, - SecretKey::Ed25519(secret_key) => PublicKey::Ed25519(secret_key.into()), - SecretKey::Secp256k1(secret_key) => PublicKey::Secp256k1(secret_key.into()), - } - } -} - -#[cfg(any(feature = "testing", test))] -impl PartialEq for SecretKey { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::System, Self::System) => true, - (Self::Ed25519(k1), Self::Ed25519(k2)) => k1.to_bytes() == k2.to_bytes(), - (Self::Secp256k1(k1), Self::Secp256k1(k2)) => k1.to_bytes() == k2.to_bytes(), - _ => false, - } - } -} -#[cfg(any(feature = "testing", test))] -impl Eq for SecretKey {} - -#[cfg(any(feature = "testing", test))] -impl Ord for SecretKey { - fn cmp(&self, other: &Self) -> Ordering { - match (self, other) { - (Self::System, Self::System) => Ordering::Equal, - (Self::Ed25519(k1), Self::Ed25519(k2)) => k1.to_bytes().cmp(&k2.to_bytes()), - (Self::Secp256k1(k1), Self::Secp256k1(k2)) => k1.to_bytes().cmp(&k2.to_bytes()), - (k1, k2) => k1.variant_name().cmp(k2.variant_name()), - } - } -} -#[cfg(any(feature = "testing", test))] -impl PartialOrd for SecretKey { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl From<&PublicKey> for Vec { - fn from(public_key: &PublicKey) -> Self { - match public_key { - PublicKey::System => Vec::new(), - PublicKey::Ed25519(key) => key.to_bytes().into(), - PublicKey::Secp256k1(key) => key.to_encoded_point(true).as_ref().into(), - } - } -} - -impl From for Vec { - fn from(public_key: PublicKey) -> Self { - Vec::::from(&public_key) - } -} - -impl Debug for PublicKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "PublicKey::{}({})", - self.variant_name(), - base16::encode_lower(&Into::>::into(self)) - ) - } -} - -impl Display for PublicKey { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "PubKey::{}({:10})", - self.variant_name(), - HexFmt(Into::>::into(self)) - ) - } -} - -impl PartialOrd for PublicKey { - fn 
partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for PublicKey { - fn cmp(&self, other: &Self) -> Ordering { - let self_tag = self.tag(); - let other_tag = other.tag(); - if self_tag == other_tag { - Into::>::into(self).cmp(&Into::>::into(other)) - } else { - self_tag.cmp(&other_tag) - } - } -} - -// This implementation of `Hash` agrees with the derived `PartialEq`. It's required since -// `ed25519_dalek::PublicKey` doesn't implement `Hash`. -#[allow(clippy::derived_hash_with_manual_eq)] -impl Hash for PublicKey { - fn hash(&self, state: &mut H) { - self.tag().hash(state); - Into::>::into(self).hash(state); - } -} - -impl Tagged for PublicKey { - fn tag(&self) -> u8 { - match self { - PublicKey::System => SYSTEM_TAG, - PublicKey::Ed25519(_) => ED25519_TAG, - PublicKey::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -impl ToBytes for PublicKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - PublicKey::System => Self::SYSTEM_LENGTH, - PublicKey::Ed25519(_) => Self::ED25519_LENGTH, - PublicKey::Secp256k1(_) => Self::SECP256K1_LENGTH, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PublicKey::System => writer.push(SYSTEM_TAG), - PublicKey::Ed25519(public_key) => { - writer.push(ED25519_TAG); - writer.extend_from_slice(public_key.as_bytes()); - } - PublicKey::Secp256k1(public_key) => { - writer.push(SECP256K1_TAG); - writer.extend_from_slice(public_key.to_encoded_point(true).as_ref()); - } - } - Ok(()) - } -} - -impl FromBytes for PublicKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - SYSTEM_TAG => Ok((PublicKey::System, remainder)), - ED25519_TAG => { - let (raw_bytes, remainder): ([u8; Self::ED25519_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = Self::ed25519_from_bytes(raw_bytes) - .map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - SECP256K1_TAG => { - let (raw_bytes, remainder): ([u8; Self::SECP256K1_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = Self::secp256k1_from_bytes(raw_bytes) - .map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for PublicKey { - fn serialize(&self, serializer: S) -> Result { - detail::serialize(self, serializer) - } -} - -impl<'de> Deserialize<'de> for PublicKey { - fn deserialize>(deserializer: D) -> Result { - detail::deserialize(deserializer) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for PublicKey { - fn schema_name() -> String { - String::from("PublicKey") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some( - "Hex-encoded cryptographic public key, including the algorithm tag prefix.".to_string(), - ); - schema_object.metadata().examples = vec![ - json!({ - "name": "SystemPublicKey", - "description": "A pseudo public key, used for example when the system proposes an \ - immediate switch block after a network upgrade rather than a specific validator. 
\ - Its hex-encoded value is always '00', as is the corresponding pseudo signature's", - "value": "00" - }), - json!({ - "name": "Ed25519PublicKey", - "description": "An Ed25519 public key. Its hex-encoded value begins '01' and is \ - followed by 64 characters", - "value": "018a88e3dd7409f195fd52db2d3cba5d72ca6709bf1d94121bf3748801b40f6f5c" - }), - json!({ - "name": "Secp256k1PublicKey", - "description": "A secp256k1 public key. Its hex-encoded value begins '02' and is \ - followed by 66 characters", - "value": "0203408e9526316fd1f8def480dd45b2cc72ffd732771c9ceb5d92ffa4051e6ee084" - }), - ]; - schema_object.into() - } -} - -impl CLTyped for PublicKey { - fn cl_type() -> CLType { - CLType::PublicKey - } -} - -/// A signature of given data. -#[derive(Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum Signature { - /// System signature. Cannot be verified. - System, - /// Ed25519 signature. - #[cfg_attr(feature = "datasize", data_size(skip))] - Ed25519(Ed25519Signature), - /// Secp256k1 signature. - #[cfg_attr(feature = "datasize", data_size(skip))] - Secp256k1(Secp256k1Signature), -} - -impl Signature { - /// The length in bytes of a system signature, - pub const SYSTEM_LENGTH: usize = 0; - - /// The length in bytes of an Ed25519 signature, - pub const ED25519_LENGTH: usize = ED25519_SIGNATURE_LENGTH; - - /// The length in bytes of a secp256k1 signature - pub const SECP256K1_LENGTH: usize = SECP256K1_SIGNATURE_LENGTH; - - /// Constructs a new Ed25519 variant from a byte array. - pub fn ed25519(bytes: [u8; Self::ED25519_LENGTH]) -> Result { - let signature = Ed25519Signature::from_bytes(&bytes); - Ok(Signature::Ed25519(signature)) - } - - /// Constructs a new secp256k1 variant from a byte array. - pub fn secp256k1(bytes: [u8; Self::SECP256K1_LENGTH]) -> Result { - let signature = Secp256k1Signature::try_from(&bytes[..]).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct secp256k1 signature from {:?}", - &bytes[..] 
- )) - })?; - - Ok(Signature::Secp256k1(signature)) - } - - fn variant_name(&self) -> &str { - match self { - Signature::System => SYSTEM, - Signature::Ed25519(_) => ED25519, - Signature::Secp256k1(_) => SECP256K1, - } - } -} - -impl AsymmetricType<'_> for Signature { - fn system() -> Self { - Signature::System - } - - fn ed25519_from_bytes>(bytes: T) -> Result { - let signature = Ed25519Signature::try_from(bytes.as_ref()).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct Ed25519 signature from {:?}", - bytes.as_ref() - )) - })?; - Ok(Signature::Ed25519(signature)) - } - - fn secp256k1_from_bytes>(bytes: T) -> Result { - let signature = Secp256k1Signature::try_from(bytes.as_ref()).map_err(|_| { - Error::AsymmetricKey(format!( - "failed to construct secp256k1 signature from {:?}", - bytes.as_ref() - )) - })?; - Ok(Signature::Secp256k1(signature)) - } -} - -impl Debug for Signature { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "Signature::{}({})", - self.variant_name(), - base16::encode_lower(&Into::>::into(*self)) - ) - } -} - -impl Display for Signature { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "Sig::{}({:10})", - self.variant_name(), - HexFmt(Into::>::into(*self)) - ) - } -} - -impl PartialOrd for Signature { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Signature { - fn cmp(&self, other: &Self) -> Ordering { - let self_tag = self.tag(); - let other_tag = other.tag(); - if self_tag == other_tag { - Into::>::into(*self).cmp(&Into::>::into(*other)) - } else { - self_tag.cmp(&other_tag) - } - } -} - -impl PartialEq for Signature { - fn eq(&self, other: &Self) -> bool { - self.tag() == other.tag() && Into::>::into(*self) == Into::>::into(*other) - } -} - -impl Eq for Signature {} - -impl Hash for Signature { - fn hash(&self, state: &mut H) { - self.tag().hash(state); - Into::>::into(*self).hash(state); - } -} - -impl Tagged for Signature { - fn tag(&self) -> u8 { - match self { - Signature::System => SYSTEM_TAG, - Signature::Ed25519(_) => ED25519_TAG, - Signature::Secp256k1(_) => SECP256K1_TAG, - } - } -} - -impl ToBytes for Signature { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - Signature::System => Self::SYSTEM_LENGTH, - Signature::Ed25519(_) => Self::ED25519_LENGTH, - Signature::Secp256k1(_) => Self::SECP256K1_LENGTH, - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - Signature::System => { - writer.push(SYSTEM_TAG); - } - Signature::Ed25519(signature) => { - writer.push(ED25519_TAG); - writer.extend(signature.to_bytes()); - } - Signature::Secp256k1(signature) => { - writer.push(SECP256K1_TAG); - writer.extend_from_slice(&signature.to_bytes()); - } - } - Ok(()) - } -} - -impl FromBytes for Signature { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - SYSTEM_TAG => Ok((Signature::System, remainder)), - ED25519_TAG => { - let (raw_bytes, remainder): ([u8; Self::ED25519_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = - Self::ed25519(raw_bytes).map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - SECP256K1_TAG => { - let (raw_bytes, remainder): ([u8; 
Self::SECP256K1_LENGTH], _) = - FromBytes::from_bytes(remainder)?; - let public_key = - Self::secp256k1(raw_bytes).map_err(|_error| bytesrepr::Error::Formatting)?; - Ok((public_key, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Serialize for Signature { - fn serialize(&self, serializer: S) -> Result { - detail::serialize(self, serializer) - } -} - -impl<'de> Deserialize<'de> for Signature { - fn deserialize>(deserializer: D) -> Result { - detail::deserialize(deserializer) - } -} - -impl From<&Signature> for Vec { - fn from(signature: &Signature) -> Self { - match signature { - Signature::System => Vec::new(), - Signature::Ed25519(signature) => signature.to_bytes().into(), - Signature::Secp256k1(signature) => (*signature.to_bytes()).into(), - } - } -} - -impl From for Vec { - fn from(signature: Signature) -> Self { - Vec::::from(&signature) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for Signature { - fn schema_name() -> String { - String::from("Signature") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some( - "Hex-encoded cryptographic signature, including the algorithm tag prefix.".to_string(), - ); - schema_object.into() - } -} - -/// Signs the given message using the given key pair. -pub fn sign>( - message: T, - secret_key: &SecretKey, - public_key: &PublicKey, -) -> Signature { - match (secret_key, public_key) { - (SecretKey::System, PublicKey::System) => { - panic!("cannot create signature with system keys",) - } - (SecretKey::Ed25519(secret_key), PublicKey::Ed25519(_public_key)) => { - let signature = secret_key.sign(message.as_ref()); - Signature::Ed25519(signature) - } - (SecretKey::Secp256k1(secret_key), PublicKey::Secp256k1(_public_key)) => { - let signer = secret_key; - let signature: Secp256k1Signature = signer - .try_sign(message.as_ref()) - .expect("should create signature"); - Signature::Secp256k1(signature) - } - _ => panic!("secret and public key types must match"), - } -} - -/// Verifies the signature of the given message against the given public key. -pub fn verify>( - message: T, - signature: &Signature, - public_key: &PublicKey, -) -> Result<(), Error> { - match (signature, public_key) { - (Signature::System, _) => Err(Error::AsymmetricKey(String::from( - "signatures based on the system key cannot be verified", - ))), - (Signature::Ed25519(signature), PublicKey::Ed25519(public_key)) => public_key - .verify_strict(message.as_ref(), signature) - .map_err(|_| Error::AsymmetricKey(String::from("failed to verify Ed25519 signature"))), - (Signature::Secp256k1(signature), PublicKey::Secp256k1(public_key)) => { - let verifier: &Secp256k1PublicKey = public_key; - verifier - .verify(message.as_ref(), signature) - .map_err(|error| { - Error::AsymmetricKey(format!("failed to verify secp256k1 signature: {}", error)) - }) - } - _ => Err(Error::AsymmetricKey(format!( - "type mismatch between {} and {}", - signature, public_key - ))), - } -} - -/// Generates an Ed25519 keypair using the operating system's cryptographically secure random number -/// generator. 
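// Illustrative sketch (not from the diff; the module path `casper_types::crypto` is an
// assumption): using the free `sign` and `verify` functions defined above. Key and
// signature algorithms must match, otherwise `verify` returns an error.
use casper_types::crypto::{sign, verify};
use casper_types::{PublicKey, SecretKey};

fn sign_verify_sketch() {
    let secret_key = SecretKey::generate_ed25519().expect("OS RNG should be available");
    let public_key = PublicKey::from(&secret_key);

    let message = b"example message";
    let signature = sign(message, &secret_key, &public_key);

    assert!(verify(message, &signature, &public_key).is_ok());
    // Any change to the message invalidates the signature.
    assert!(verify(&message[1..], &signature, &public_key).is_err());
}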
-#[cfg(any(feature = "std", test))] -pub fn generate_ed25519_keypair() -> (SecretKey, PublicKey) { - let secret_key = SecretKey::generate_ed25519().unwrap(); - let public_key = PublicKey::from(&secret_key); - (secret_key, public_key) -} - -mod detail { - use alloc::{string::String, vec::Vec}; - - use serde::{de::Error as _deError, Deserialize, Deserializer, Serialize, Serializer}; - - use super::{PublicKey, Signature}; - use crate::AsymmetricType; - - /// Used to serialize and deserialize asymmetric key types where the (de)serializer is not a - /// human-readable type. - /// - /// The wrapped contents are the result of calling `t_as_ref()` on the type. - #[derive(Serialize, Deserialize)] - pub(super) enum AsymmetricTypeAsBytes { - System, - Ed25519(Vec), - Secp256k1(Vec), - } - - impl From<&PublicKey> for AsymmetricTypeAsBytes { - fn from(public_key: &PublicKey) -> Self { - match public_key { - PublicKey::System => AsymmetricTypeAsBytes::System, - key @ PublicKey::Ed25519(_) => AsymmetricTypeAsBytes::Ed25519(key.into()), - key @ PublicKey::Secp256k1(_) => AsymmetricTypeAsBytes::Secp256k1(key.into()), - } - } - } - - impl From<&Signature> for AsymmetricTypeAsBytes { - fn from(signature: &Signature) -> Self { - match signature { - Signature::System => AsymmetricTypeAsBytes::System, - key @ Signature::Ed25519(_) => AsymmetricTypeAsBytes::Ed25519(key.into()), - key @ Signature::Secp256k1(_) => AsymmetricTypeAsBytes::Secp256k1(key.into()), - } - } - } - - pub(super) fn serialize<'a, T, S>(value: &'a T, serializer: S) -> Result - where - T: AsymmetricType<'a>, - Vec: From<&'a T>, - S: Serializer, - AsymmetricTypeAsBytes: From<&'a T>, - { - if serializer.is_human_readable() { - return value.to_hex().serialize(serializer); - } - - AsymmetricTypeAsBytes::from(value).serialize(serializer) - } - - pub(super) fn deserialize<'a, 'de, T, D>(deserializer: D) -> Result - where - T: AsymmetricType<'a>, - Vec: From<&'a T>, - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let value = T::from_hex(hex_string.as_bytes()).map_err(D::Error::custom)?; - return Ok(value); - } - - let as_bytes = AsymmetricTypeAsBytes::deserialize(deserializer)?; - match as_bytes { - AsymmetricTypeAsBytes::System => Ok(T::system()), - AsymmetricTypeAsBytes::Ed25519(raw_bytes) => { - T::ed25519_from_bytes(raw_bytes).map_err(D::Error::custom) - } - AsymmetricTypeAsBytes::Secp256k1(raw_bytes) => { - T::secp256k1_from_bytes(raw_bytes).map_err(D::Error::custom) - } - } - } -} diff --git a/casper_types_ver_2_0/src/crypto/asymmetric_key/gens.rs b/casper_types_ver_2_0/src/crypto/asymmetric_key/gens.rs deleted file mode 100644 index 2316133a..00000000 --- a/casper_types_ver_2_0/src/crypto/asymmetric_key/gens.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! 
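// Illustrative sketch (not from the diff; crate paths and the `serde_json`/`bincode`
// dependencies are assumptions mirroring the deleted tests): what the `detail` helpers
// above imply for concrete formats. Human-readable serializers receive the
// tag-prefixed hex string, binary serializers receive the byte-oriented enum.
use casper_types::{AsymmetricType, PublicKey, SecretKey};

fn serde_formats_sketch() {
    let secret_key = SecretKey::generate_ed25519().expect("OS RNG should be available");
    let public_key = PublicKey::from(&secret_key);

    // JSON: a quoted hex string such as "01ab...".
    let json = serde_json::to_string(&public_key).expect("should serialize");
    assert_eq!(json, format!("\"{}\"", public_key.to_hex()));

    // bincode: round-trips through the raw-bytes representation.
    let bytes = bincode::serialize(&public_key).expect("should serialize");
    let decoded: PublicKey = bincode::deserialize(&bytes).expect("should deserialize");
    assert_eq!(public_key, decoded);
}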
Generators for asymmetric key types - -use core::convert::TryInto; - -use proptest::{ - collection, - prelude::{Arbitrary, Just, Strategy}, - prop_oneof, -}; - -use crate::{crypto::SecretKey, PublicKey}; - -/// Creates an arbitrary [`PublicKey`] -pub fn public_key_arb() -> impl Strategy { - prop_oneof![ - Just(PublicKey::System), - collection::vec(::arbitrary(), SecretKey::ED25519_LENGTH).prop_map(|bytes| { - let byte_array: [u8; SecretKey::ED25519_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::ed25519_from_bytes(byte_array).unwrap(); - PublicKey::from(&secret_key) - }), - collection::vec(::arbitrary(), SecretKey::SECP256K1_LENGTH).prop_map(|bytes| { - let bytes_array: [u8; SecretKey::SECP256K1_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::secp256k1_from_bytes(bytes_array).unwrap(); - PublicKey::from(&secret_key) - }) - ] -} - -/// Returns a strategy for creating random [`PublicKey`] instances but NOT system variant. -pub fn public_key_arb_no_system() -> impl Strategy { - prop_oneof![ - collection::vec(::arbitrary(), SecretKey::ED25519_LENGTH).prop_map(|bytes| { - let byte_array: [u8; SecretKey::ED25519_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::ed25519_from_bytes(byte_array).unwrap(); - PublicKey::from(&secret_key) - }), - collection::vec(::arbitrary(), SecretKey::SECP256K1_LENGTH).prop_map(|bytes| { - let bytes_array: [u8; SecretKey::SECP256K1_LENGTH] = bytes.try_into().unwrap(); - let secret_key = SecretKey::secp256k1_from_bytes(bytes_array).unwrap(); - PublicKey::from(&secret_key) - }) - ] -} diff --git a/casper_types_ver_2_0/src/crypto/asymmetric_key/tests.rs b/casper_types_ver_2_0/src/crypto/asymmetric_key/tests.rs deleted file mode 100644 index 545b8dad..00000000 --- a/casper_types_ver_2_0/src/crypto/asymmetric_key/tests.rs +++ /dev/null @@ -1,861 +0,0 @@ -use std::{ - cmp::Ordering, - collections::hash_map::DefaultHasher, - hash::{Hash, Hasher}, - iter, -}; - -use rand::RngCore; - -use k256::elliptic_curve::sec1::ToEncodedPoint; -use openssl::pkey::{PKey, Private, Public}; - -use super::*; -use crate::{ - bytesrepr, checksummed_hex, crypto::SecretKey, testing::TestRng, AsymmetricType, PublicKey, - Tagged, -}; - -#[test] -fn can_construct_ed25519_keypair_from_zeroes() { - let bytes = [0; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -#[should_panic] -fn cannot_construct_secp256k1_keypair_from_zeroes() { - let bytes = [0; SecretKey::SECP256K1_LENGTH]; - let secret_key = SecretKey::secp256k1_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -fn can_construct_ed25519_keypair_from_ones() { - let bytes = [1; SecretKey::ED25519_LENGTH]; - let secret_key = SecretKey::ed25519_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -#[test] -fn can_construct_secp256k1_keypair_from_ones() { - let bytes = [1; SecretKey::SECP256K1_LENGTH]; - let secret_key = SecretKey::secp256k1_from_bytes(bytes).unwrap(); - let _public_key: PublicKey = (&secret_key).into(); -} - -type OpenSSLSecretKey = PKey; -type OpenSSLPublicKey = PKey; - -// `SecretKey` does not implement `PartialEq`, so just compare derived `PublicKey`s. 
-fn assert_secret_keys_equal(lhs: &SecretKey, rhs: &SecretKey) { - assert_eq!(PublicKey::from(lhs), PublicKey::from(rhs)); -} - -fn secret_key_der_roundtrip(secret_key: SecretKey) { - let der_encoded = secret_key.to_der().unwrap(); - let decoded = SecretKey::from_der(&der_encoded).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. - SecretKey::from_der(&der_encoded[1..]).unwrap_err(); -} - -fn secret_key_pem_roundtrip(secret_key: SecretKey) { - let pem_encoded = secret_key.to_pem().unwrap(); - let decoded = SecretKey::from_pem(pem_encoded.as_bytes()).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); - - // Check PEM-encoded can be decoded by openssl. - let _ = OpenSSLSecretKey::private_key_from_pem(pem_encoded.as_bytes()).unwrap(); - - // Ensure malformed encoded version fails to decode. - SecretKey::from_pem(&pem_encoded[1..]).unwrap_err(); -} - -fn known_secret_key_to_pem(expected_key: &SecretKey, known_key_pem: &str, expected_tag: u8) { - let decoded = SecretKey::from_pem(known_key_pem.as_bytes()).unwrap(); - assert_secret_keys_equal(expected_key, &decoded); - assert_eq!(expected_tag, decoded.tag()); -} - -fn secret_key_file_roundtrip(secret_key: SecretKey) { - let tempdir = tempfile::tempdir().unwrap(); - let path = tempdir.path().join("test_secret_key.pem"); - - secret_key.to_file(&path).unwrap(); - let decoded = SecretKey::from_file(&path).unwrap(); - assert_secret_keys_equal(&secret_key, &decoded); - assert_eq!(secret_key.tag(), decoded.tag()); -} - -fn public_key_serialization_roundtrip(public_key: PublicKey) { - // Try to/from bincode. - let serialized = bincode::serialize(&public_key).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(public_key, deserialized); - assert_eq!(public_key.tag(), deserialized.tag()); - - // Try to/from JSON. - let serialized = serde_json::to_vec_pretty(&public_key).unwrap(); - let deserialized = serde_json::from_slice(&serialized).unwrap(); - assert_eq!(public_key, deserialized); - assert_eq!(public_key.tag(), deserialized.tag()); - - // Using bytesrepr. - bytesrepr::test_serialization_roundtrip(&public_key); -} - -fn public_key_der_roundtrip(public_key: PublicKey) { - let der_encoded = public_key.to_der().unwrap(); - let decoded = PublicKey::from_der(&der_encoded).unwrap(); - assert_eq!(public_key, decoded); - - // Check DER-encoded can be decoded by openssl. - let _ = OpenSSLPublicKey::public_key_from_der(&der_encoded).unwrap(); - - // Ensure malformed encoded version fails to decode. - PublicKey::from_der(&der_encoded[1..]).unwrap_err(); -} - -fn public_key_pem_roundtrip(public_key: PublicKey) { - let pem_encoded = public_key.to_pem().unwrap(); - let decoded = PublicKey::from_pem(pem_encoded.as_bytes()).unwrap(); - assert_eq!(public_key, decoded); - assert_eq!(public_key.tag(), decoded.tag()); - - // Check PEM-encoded can be decoded by openssl. - let _ = OpenSSLPublicKey::public_key_from_pem(pem_encoded.as_bytes()).unwrap(); - - // Ensure malformed encoded version fails to decode. 
- PublicKey::from_pem(&pem_encoded[1..]).unwrap_err(); -} - -fn known_public_key_to_pem(known_key_hex: &str, known_key_pem: &str) { - let key_bytes = checksummed_hex::decode(known_key_hex).unwrap(); - let decoded = PublicKey::from_pem(known_key_pem.as_bytes()).unwrap(); - assert_eq!(key_bytes, Into::>::into(decoded)); -} - -fn public_key_file_roundtrip(public_key: PublicKey) { - let tempdir = tempfile::tempdir().unwrap(); - let path = tempdir.path().join("test_public_key.pem"); - - public_key.to_file(&path).unwrap(); - let decoded = PublicKey::from_file(&path).unwrap(); - assert_eq!(public_key, decoded); -} - -fn public_key_hex_roundtrip(public_key: PublicKey) { - let hex_encoded = public_key.to_hex(); - let decoded = PublicKey::from_hex(&hex_encoded).unwrap(); - assert_eq!(public_key, decoded); - assert_eq!(public_key.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. - PublicKey::from_hex(&hex_encoded[..1]).unwrap_err(); - PublicKey::from_hex(&hex_encoded[1..]).unwrap_err(); -} - -fn signature_serialization_roundtrip(signature: Signature) { - // Try to/from bincode. - let serialized = bincode::serialize(&signature).unwrap(); - let deserialized: Signature = bincode::deserialize(&serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()); - - // Try to/from JSON. - let serialized = serde_json::to_vec_pretty(&signature).unwrap(); - let deserialized = serde_json::from_slice(&serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()); - - // Try to/from using bytesrepr. - let serialized = bytesrepr::serialize(signature).unwrap(); - let deserialized = bytesrepr::deserialize(serialized).unwrap(); - assert_eq!(signature, deserialized); - assert_eq!(signature.tag(), deserialized.tag()) -} - -fn signature_hex_roundtrip(signature: Signature) { - let hex_encoded = signature.to_hex(); - let decoded = Signature::from_hex(hex_encoded.as_bytes()).unwrap(); - assert_eq!(signature, decoded); - assert_eq!(signature.tag(), decoded.tag()); - - // Ensure malformed encoded version fails to decode. 
- Signature::from_hex(&hex_encoded[..1]).unwrap_err(); - Signature::from_hex(&hex_encoded[1..]).unwrap_err(); -} - -fn hash(data: &T) -> u64 { - let mut hasher = DefaultHasher::new(); - data.hash(&mut hasher); - hasher.finish() -} - -fn check_ord_and_hash(low: T, high: T) { - let low_copy = low.clone(); - - assert_eq!(hash(&low), hash(&low_copy)); - assert_ne!(hash(&low), hash(&high)); - - assert_eq!(Ordering::Less, low.cmp(&high)); - assert_eq!(Some(Ordering::Less), low.partial_cmp(&high)); - - assert_eq!(Ordering::Greater, high.cmp(&low)); - assert_eq!(Some(Ordering::Greater), high.partial_cmp(&low)); - - assert_eq!(Ordering::Equal, low.cmp(&low_copy)); - assert_eq!(Some(Ordering::Equal), low.partial_cmp(&low_copy)); -} - -mod system { - use std::path::Path; - - use super::{sign, verify}; - use crate::crypto::{AsymmetricType, PublicKey, SecretKey, Signature}; - - #[test] - fn secret_key_to_der_should_error() { - assert!(SecretKey::system().to_der().is_err()); - } - - #[test] - fn secret_key_to_pem_should_error() { - assert!(SecretKey::system().to_pem().is_err()); - } - - #[test] - fn secret_key_to_file_should_error() { - assert!(SecretKey::system().to_file(Path::new("/dev/null")).is_err()); - } - - #[test] - fn public_key_serialization_roundtrip() { - super::public_key_serialization_roundtrip(PublicKey::system()); - } - - #[test] - fn public_key_to_der_should_error() { - assert!(PublicKey::system().to_der().is_err()); - } - - #[test] - fn public_key_to_pem_should_error() { - assert!(PublicKey::system().to_pem().is_err()); - } - - #[test] - fn public_key_to_file_should_error() { - assert!(PublicKey::system().to_file(Path::new("/dev/null")).is_err()); - } - - #[test] - fn public_key_to_and_from_hex() { - super::public_key_hex_roundtrip(PublicKey::system()); - } - - #[test] - #[should_panic] - fn sign_should_panic() { - sign([], &SecretKey::system(), &PublicKey::system()); - } - - #[test] - fn signature_to_and_from_hex() { - super::signature_hex_roundtrip(Signature::system()); - } - - #[test] - fn public_key_to_account_hash() { - assert_ne!( - PublicKey::system().to_account_hash().as_ref(), - Into::>::into(PublicKey::system()) - ); - } - - #[test] - fn verify_should_error() { - assert!(verify([], &Signature::system(), &PublicKey::system()).is_err()); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - crate::bytesrepr::test_serialization_roundtrip(&Signature::system()); - } -} - -mod ed25519 { - use rand::Rng; - - use super::*; - use crate::ED25519_TAG; - - const SECRET_KEY_LENGTH: usize = SecretKey::ED25519_LENGTH; - const PUBLIC_KEY_LENGTH: usize = PublicKey::ED25519_LENGTH; - const SIGNATURE_LENGTH: usize = Signature::ED25519_LENGTH; - - #[test] - fn secret_key_from_bytes() { - // Secret key should be `SecretKey::ED25519_LENGTH` bytes. - let bytes = [0; SECRET_KEY_LENGTH + 1]; - assert!(SecretKey::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(SecretKey::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(SecretKey::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn secret_key_to_and_from_der() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let der_encoded = secret_key.to_der().unwrap(); - secret_key_der_roundtrip(secret_key); - - // Check DER-encoded can be decoded by openssl. 
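// Illustrative sketch (not from the diff; paths assumed): a summary of the `System`
// pseudo-key behaviour exercised by the tests above. It hex-encodes to just its tag
// byte, cannot be exported to DER or PEM, and its signature can never be verified.
use casper_types::crypto::verify;
use casper_types::{AsymmetricType, PublicKey, SecretKey, Signature};

fn system_variant_sketch() {
    assert_eq!(PublicKey::system().to_hex(), "00");
    assert!(SecretKey::system().to_pem().is_err());
    assert!(PublicKey::system().to_der().is_err());
    assert!(verify(b"msg", &Signature::system(), &PublicKey::system()).is_err());
}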
- let _ = OpenSSLSecretKey::private_key_from_der(&der_encoded).unwrap(); - } - - #[test] - fn secret_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - secret_key_pem_roundtrip(secret_key); - } - - #[test] - fn known_secret_key_to_pem() { - // Example values taken from https://tools.ietf.org/html/rfc8410#section-10.3 - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PRIVATE KEY----- -MC4CAQAwBQYDK2VwBCIEINTuctv5E1hK1bbY8fdp+K06/nwoy/HU++CXqI9EdVhC ------END PRIVATE KEY-----"#; - let key_bytes = - base16::decode("d4ee72dbf913584ad5b6d8f1f769f8ad3afe7c28cbf1d4fbe097a88f44755842") - .unwrap(); - let expected_key = SecretKey::ed25519_from_bytes(key_bytes).unwrap(); - super::known_secret_key_to_pem(&expected_key, KNOWN_KEY_PEM, ED25519_TAG); - } - - #[test] - fn secret_key_to_and_from_file() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - secret_key_file_roundtrip(secret_key); - } - - #[test] - fn public_key_serialization_roundtrip() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - super::public_key_serialization_roundtrip(public_key); - } - - #[test] - fn public_key_from_bytes() { - // Public key should be `PublicKey::ED25519_LENGTH` bytes. Create vec with an extra - // byte. - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - let bytes: Vec = iter::once(rng.gen()) - .chain(Into::>::into(public_key)) - .collect::>(); - - assert!(PublicKey::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(PublicKey::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(PublicKey::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn public_key_to_and_from_der() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_der_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_pem_roundtrip(public_key); - } - - #[test] - fn known_public_key_to_pem() { - // Example values taken from https://tools.ietf.org/html/rfc8410#section-10.1 - const KNOWN_KEY_HEX: &str = - "19bf44096984cdfe8541bac167dc3b96c85086aa30b6b6cb0c5c38ad703166e1"; - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PUBLIC KEY----- -MCowBQYDK2VwAyEAGb9ECWmEzf6FQbrBZ9w7lshQhqowtrbLDFw4rXAxZuE= ------END PUBLIC KEY-----"#; - super::known_public_key_to_pem(KNOWN_KEY_HEX, KNOWN_KEY_PEM); - } - - #[test] - fn public_key_to_and_from_file() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_file_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_ed25519(&mut rng); - public_key_hex_roundtrip(public_key); - } - - #[test] - fn signature_serialization_roundtrip() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - super::signature_serialization_roundtrip(signature); - } - - #[test] - fn signature_from_bytes() { - // Signature should be `Signature::ED25519_LENGTH` bytes. 
- let bytes = [2; SIGNATURE_LENGTH + 1]; - assert!(Signature::ed25519_from_bytes(&bytes[..]).is_err()); - assert!(Signature::ed25519_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(Signature::ed25519_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn signature_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - signature_hex_roundtrip(signature); - } - - #[test] - fn public_key_traits() { - let public_key_low = PublicKey::ed25519_from_bytes([1; PUBLIC_KEY_LENGTH]).unwrap(); - let public_key_high = PublicKey::ed25519_from_bytes([3; PUBLIC_KEY_LENGTH]).unwrap(); - check_ord_and_hash(public_key_low, public_key_high) - } - - #[test] - fn public_key_to_account_hash() { - let public_key_high = PublicKey::ed25519_from_bytes([255; PUBLIC_KEY_LENGTH]).unwrap(); - assert_ne!( - public_key_high.to_account_hash().as_ref(), - Into::>::into(public_key_high) - ); - } - - #[test] - fn signature_traits() { - let signature_low = Signature::ed25519([1; SIGNATURE_LENGTH]).unwrap(); - let signature_high = Signature::ed25519([3; SIGNATURE_LENGTH]).unwrap(); - check_ord_and_hash(signature_low, signature_high) - } - - #[test] - fn sign_and_verify() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_ed25519(&mut rng); - - let public_key = PublicKey::from(&secret_key); - let other_public_key = PublicKey::random_ed25519(&mut rng); - let wrong_type_public_key = PublicKey::random_secp256k1(&mut rng); - - let message = b"message"; - let signature = sign(message, &secret_key, &public_key); - - assert!(verify(message, &signature, &public_key).is_ok()); - assert!(verify(message, &signature, &other_public_key).is_err()); - assert!(verify(message, &signature, &wrong_type_public_key).is_err()); - assert!(verify(&message[1..], &signature, &public_key).is_err()); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - let mut rng = TestRng::new(); - let ed25519_secret_key = SecretKey::random_ed25519(&mut rng); - let public_key = PublicKey::from(&ed25519_secret_key); - let data = b"data"; - let signature = sign(data, &ed25519_secret_key, &public_key); - bytesrepr::test_serialization_roundtrip(&signature); - } - - #[test] - fn validate_known_signature() { - // In the event that this test fails, we need to consider pinning the version of the - // `ed25519-dalek` crate to maintain backwards compatibility with existing data on the - // Casper network. 
- - // Values taken from: - // https://github.com/dalek-cryptography/ed25519-dalek/blob/925eb9ea56192053c9eb93b9d30d1b9419eee128/TESTVECTORS#L62 - let secret_key_hex = "bf5ba5d6a49dd5ef7b4d5d7d3e4ecc505c01f6ccee4c54b5ef7b40af6a454140"; - let public_key_hex = "1be034f813017b900d8990af45fad5b5214b573bd303ef7a75ef4b8c5c5b9842"; - let message_hex = - "16152c2e037b1c0d3219ced8e0674aee6b57834b55106c5344625322da638ecea2fc9a424a05ee9512\ - d48fcf75dd8bd4691b3c10c28ec98ee1afa5b863d1c36795ed18105db3a9aabd9d2b4c1747adbaf1a56\ - ffcc0c533c1c0faef331cdb79d961fa39f880a1b8b1164741822efb15a7259a465bef212855751fab66\ - a897bfa211abe0ea2f2e1cd8a11d80e142cde1263eec267a3138ae1fcf4099db0ab53d64f336f4bcd7a\ - 363f6db112c0a2453051a0006f813aaf4ae948a2090619374fa58052409c28ef76225687df3cb2d1b0b\ - fb43b09f47f1232f790e6d8dea759e57942099f4c4bd3390f28afc2098244961465c643fc8b29766af2\ - bcbc5440b86e83608cfc937be98bb4827fd5e6b689adc2e26513db531076a6564396255a09975b7034d\ - ac06461b255642e3a7ed75fa9fc265011f5f6250382a84ac268d63ba64"; - let signature_hex = - "279cace6fdaf3945e3837df474b28646143747632bede93e7a66f5ca291d2c24978512ca0cb8827c8c\ - 322685bd605503a5ec94dbae61bbdcae1e49650602bc07"; - - let secret_key_bytes = base16::decode(secret_key_hex).unwrap(); - let public_key_bytes = base16::decode(public_key_hex).unwrap(); - let message_bytes = base16::decode(message_hex).unwrap(); - let signature_bytes = base16::decode(signature_hex).unwrap(); - - let secret_key = SecretKey::ed25519_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::ed25519_from_bytes(public_key_bytes).unwrap(); - assert_eq!(public_key, PublicKey::from(&secret_key)); - - let signature = Signature::ed25519_from_bytes(signature_bytes).unwrap(); - assert_eq!(sign(&message_bytes, &secret_key, &public_key), signature); - assert!(verify(&message_bytes, &signature, &public_key).is_ok()); - } -} - -mod secp256k1 { - use rand::Rng; - - use super::*; - use crate::SECP256K1_TAG; - - const SECRET_KEY_LENGTH: usize = SecretKey::SECP256K1_LENGTH; - const SIGNATURE_LENGTH: usize = Signature::SECP256K1_LENGTH; - - #[test] - fn secret_key_from_bytes() { - // Secret key should be `SecretKey::SECP256K1_LENGTH` bytes. - // The k256 library will ensure that a byte stream of a length not equal to - // `SECP256K1_LENGTH` will fail due to an assertion internal to the library. - // We can check that invalid byte streams e.g [0;32] does not generate a valid key. - let bytes = [0; SECRET_KEY_LENGTH]; - assert!(SecretKey::secp256k1_from_bytes(&bytes[..]).is_err()); - - // Check that a valid byte stream produces a valid key - let bytes = [1; SECRET_KEY_LENGTH]; - assert!(SecretKey::secp256k1_from_bytes(&bytes[..]).is_ok()); - } - - #[test] - fn secret_key_to_and_from_der() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_der_roundtrip(secret_key); - } - - #[test] - fn secret_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_pem_roundtrip(secret_key); - } - - #[test] - fn known_secret_key_to_pem() { - // Example values taken from Python client. 
- const KNOWN_KEY_PEM: &str = r#"-----BEGIN EC PRIVATE KEY----- -MHQCAQEEIL3fqaMKAfXSK1D2PnVVbZlZ7jTv133nukq4+95s6kmcoAcGBSuBBAAK -oUQDQgAEQI6VJjFv0fje9IDdRbLMcv/XMnccnOtdkv+kBR5u4ISEAkuc2TFWQHX0 -Yj9oTB9fx9+vvQdxJOhMtu46kGo0Uw== ------END EC PRIVATE KEY-----"#; - let key_bytes = - base16::decode("bddfa9a30a01f5d22b50f63e75556d9959ee34efd77de7ba4ab8fbde6cea499c") - .unwrap(); - let expected_key = SecretKey::secp256k1_from_bytes(key_bytes).unwrap(); - super::known_secret_key_to_pem(&expected_key, KNOWN_KEY_PEM, SECP256K1_TAG); - } - - #[test] - fn secret_key_to_and_from_file() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - secret_key_file_roundtrip(secret_key); - } - - #[test] - fn public_key_serialization_roundtrip() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - super::public_key_serialization_roundtrip(public_key); - } - - #[test] - fn public_key_from_bytes() { - // Public key should be `PublicKey::SECP256K1_LENGTH` bytes. Create vec with an extra - // byte. - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - let bytes: Vec = iter::once(rng.gen()) - .chain(Into::>::into(public_key)) - .collect::>(); - - assert!(PublicKey::secp256k1_from_bytes(&bytes[..]).is_err()); - assert!(PublicKey::secp256k1_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(PublicKey::secp256k1_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn public_key_to_and_from_der() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_der_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_pem() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_pem_roundtrip(public_key); - } - - #[test] - fn known_public_key_to_pem() { - // Example values taken from Python client. - const KNOWN_KEY_HEX: &str = - "03408e9526316fd1f8def480dd45b2cc72ffd732771c9ceb5d92ffa4051e6ee084"; - const KNOWN_KEY_PEM: &str = r#"-----BEGIN PUBLIC KEY----- -MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEQI6VJjFv0fje9IDdRbLMcv/XMnccnOtd -kv+kBR5u4ISEAkuc2TFWQHX0Yj9oTB9fx9+vvQdxJOhMtu46kGo0Uw== ------END PUBLIC KEY-----"#; - super::known_public_key_to_pem(KNOWN_KEY_HEX, KNOWN_KEY_PEM); - } - - #[test] - fn public_key_to_and_from_file() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_file_roundtrip(public_key); - } - - #[test] - fn public_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - public_key_hex_roundtrip(public_key); - } - - #[test] - fn signature_serialization_roundtrip() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - super::signature_serialization_roundtrip(signature); - } - - #[test] - fn bytesrepr_roundtrip_signature() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - bytesrepr::test_serialization_roundtrip(&signature); - } - - #[test] - fn signature_from_bytes() { - // Signature should be `Signature::SECP256K1_LENGTH` bytes. 
- let bytes = [2; SIGNATURE_LENGTH + 1]; - assert!(Signature::secp256k1_from_bytes(&bytes[..]).is_err()); - assert!(Signature::secp256k1_from_bytes(&bytes[2..]).is_err()); - - // Check the same bytes but of the right length succeeds. - assert!(Signature::secp256k1_from_bytes(&bytes[1..]).is_ok()); - } - - #[test] - fn signature_key_to_and_from_hex() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random_secp256k1(&mut rng); - let public_key = PublicKey::from(&secret_key); - let data = b"data"; - let signature = sign(data, &secret_key, &public_key); - signature_hex_roundtrip(signature); - } - - #[test] - fn public_key_traits() { - let mut rng = TestRng::new(); - let public_key1 = PublicKey::random_secp256k1(&mut rng); - let public_key2 = PublicKey::random_secp256k1(&mut rng); - if Into::>::into(public_key1.clone()) < Into::>::into(public_key2.clone()) { - check_ord_and_hash(public_key1, public_key2) - } else { - check_ord_and_hash(public_key2, public_key1) - } - } - - #[test] - fn public_key_to_account_hash() { - let mut rng = TestRng::new(); - let public_key = PublicKey::random_secp256k1(&mut rng); - assert_ne!( - public_key.to_account_hash().as_ref(), - Into::>::into(public_key) - ); - } - - #[test] - fn signature_traits() { - let signature_low = Signature::secp256k1([1; SIGNATURE_LENGTH]).unwrap(); - let signature_high = Signature::secp256k1([3; SIGNATURE_LENGTH]).unwrap(); - check_ord_and_hash(signature_low, signature_high) - } - - #[test] - fn validate_known_signature() { - // In the event that this test fails, we need to consider pinning the version of the - // `k256` crate to maintain backwards compatibility with existing data on the Casper - // network. - let secret_key_hex = "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42"; - let public_key_hex = "028e24fd9654f12c793d3d376c15f7abe53e0fbd537884a3a98d10d2dc6d513b4e"; - let message_hex = "616263"; - let signature_hex = "8016162860f0795154643d15c5ab5bb840d8c695d6de027421755579ea7f2a4629b7e0c88fc3428669a6a89496f426181b73f10c6c8a05ac8f49d6cb5032eb89"; - - let secret_key_bytes = base16::decode(secret_key_hex).unwrap(); - let public_key_bytes = base16::decode(public_key_hex).unwrap(); - let message_bytes = base16::decode(message_hex).unwrap(); - let signature_bytes = base16::decode(signature_hex).unwrap(); - - let secret_key = SecretKey::secp256k1_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::secp256k1_from_bytes(public_key_bytes).unwrap(); - assert_eq!(public_key, PublicKey::from(&secret_key)); - - let signature = Signature::secp256k1_from_bytes(signature_bytes).unwrap(); - assert_eq!(sign(&message_bytes, &secret_key, &public_key), signature); - assert!(verify(&message_bytes, &signature, &public_key).is_ok()); - } -} - -#[test] -fn public_key_traits() { - let system_key = PublicKey::system(); - let mut rng = TestRng::new(); - let ed25519_public_key = PublicKey::random_ed25519(&mut rng); - let secp256k1_public_key = PublicKey::random_secp256k1(&mut rng); - check_ord_and_hash(ed25519_public_key.clone(), secp256k1_public_key.clone()); - check_ord_and_hash(system_key.clone(), ed25519_public_key); - check_ord_and_hash(system_key, secp256k1_public_key); -} - -#[test] -fn signature_traits() { - let system_sig = Signature::system(); - let ed25519_sig = Signature::ed25519([3; Signature::ED25519_LENGTH]).unwrap(); - let secp256k1_sig = Signature::secp256k1([1; Signature::SECP256K1_LENGTH]).unwrap(); - check_ord_and_hash(ed25519_sig, secp256k1_sig); - check_ord_and_hash(system_sig, 
ed25519_sig); - check_ord_and_hash(system_sig, secp256k1_sig); -} - -#[test] -fn sign_and_verify() { - let mut rng = TestRng::new(); - let ed25519_secret_key = SecretKey::random_ed25519(&mut rng); - let secp256k1_secret_key = SecretKey::random_secp256k1(&mut rng); - - let ed25519_public_key = PublicKey::from(&ed25519_secret_key); - let secp256k1_public_key = PublicKey::from(&secp256k1_secret_key); - - let other_ed25519_public_key = PublicKey::random_ed25519(&mut rng); - let other_secp256k1_public_key = PublicKey::random_secp256k1(&mut rng); - - let message = b"message"; - let ed25519_signature = sign(message, &ed25519_secret_key, &ed25519_public_key); - let secp256k1_signature = sign(message, &secp256k1_secret_key, &secp256k1_public_key); - - assert!(verify(message, &ed25519_signature, &ed25519_public_key).is_ok()); - assert!(verify(message, &secp256k1_signature, &secp256k1_public_key).is_ok()); - - assert!(verify(message, &ed25519_signature, &other_ed25519_public_key).is_err()); - assert!(verify(message, &secp256k1_signature, &other_secp256k1_public_key).is_err()); - - assert!(verify(message, &ed25519_signature, &secp256k1_public_key).is_err()); - assert!(verify(message, &secp256k1_signature, &ed25519_public_key).is_err()); - - assert!(verify(&message[1..], &ed25519_signature, &ed25519_public_key).is_err()); - assert!(verify(&message[1..], &secp256k1_signature, &secp256k1_public_key).is_err()); -} - -#[test] -fn should_construct_secp256k1_from_uncompressed_bytes() { - let mut rng = TestRng::new(); - - let mut secret_key_bytes = [0u8; SecretKey::SECP256K1_LENGTH]; - rng.fill_bytes(&mut secret_key_bytes[..]); - - // Construct a secp256k1 secret key and use that to construct a public key. - let secp256k1_secret_key = k256::SecretKey::from_slice(&secret_key_bytes).unwrap(); - let secp256k1_public_key = secp256k1_secret_key.public_key(); - - // Construct a CL secret key and public key from that (which will be a compressed key). - let secret_key = SecretKey::secp256k1_from_bytes(secret_key_bytes).unwrap(); - let public_key = PublicKey::from(&secret_key); - assert_eq!( - Into::>::into(public_key.clone()).len(), - PublicKey::SECP256K1_LENGTH - ); - assert_ne!( - secp256k1_public_key - .to_encoded_point(false) - .as_bytes() - .len(), - PublicKey::SECP256K1_LENGTH - ); - - // Construct a CL public key from uncompressed public key bytes and ensure it's compressed. - let from_uncompressed_bytes = - PublicKey::secp256k1_from_bytes(secp256k1_public_key.to_encoded_point(false).as_bytes()) - .unwrap(); - assert_eq!(public_key, from_uncompressed_bytes); - - // Construct a CL public key from the uncompressed one's hex representation and ensure it's - // compressed. 
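// A minimal illustrative sketch (not from the deleted sources) of the point
// normalisation exercised by `should_construct_secp256k1_from_uncompressed_bytes`
// above: an uncompressed SEC1 encoding is 65 bytes, the compressed one is 33,
// and re-parsing the uncompressed form recovers the same point, so either input
// can be normalised to the compressed representation. The fixed [9u8; 32]
// secret scalar is just an in-range placeholder.
use k256::elliptic_curve::sec1::ToEncodedPoint;

fn main() {
    let secret = k256::SecretKey::from_slice(&[9u8; 32]).expect("scalar in range");
    let public = secret.public_key();

    let uncompressed = public.to_encoded_point(false); // 0x04 || x || y
    let compressed = public.to_encoded_point(true); // 0x02/0x03 || x
    assert_eq!(uncompressed.as_bytes().len(), 65);
    assert_eq!(compressed.as_bytes().len(), 33);

    // Accept the longer encoding, normalise to the shorter one.
    let reparsed =
        k256::PublicKey::from_sec1_bytes(uncompressed.as_bytes()).expect("valid point");
    assert_eq!(reparsed.to_encoded_point(true), compressed);
}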
- let uncompressed_hex = { - let tag_bytes = vec![0x02u8]; - base16::encode_lower(&tag_bytes) - + &base16::encode_lower(&secp256k1_public_key.to_encoded_point(false).as_bytes()) - }; - - format!( - "02{}", - base16::encode_lower(secp256k1_public_key.to_encoded_point(false).as_bytes()) - .to_lowercase() - ); - let from_uncompressed_hex = PublicKey::from_hex(uncompressed_hex).unwrap(); - assert_eq!(public_key, from_uncompressed_hex); -} - -#[test] -fn generate_ed25519_should_generate_an_ed25519_key() { - let secret_key = SecretKey::generate_ed25519().unwrap(); - assert!(matches!(secret_key, SecretKey::Ed25519(_))) -} - -#[test] -fn generate_secp256k1_should_generate_an_secp256k1_key() { - let secret_key = SecretKey::generate_secp256k1().unwrap(); - assert!(matches!(secret_key, SecretKey::Secp256k1(_))) -} diff --git a/casper_types_ver_2_0/src/crypto/error.rs b/casper_types_ver_2_0/src/crypto/error.rs deleted file mode 100644 index a4d822aa..00000000 --- a/casper_types_ver_2_0/src/crypto/error.rs +++ /dev/null @@ -1,155 +0,0 @@ -use alloc::string::String; -use core::fmt::{self, Display, Formatter}; -#[cfg(any(feature = "std", test))] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use ed25519_dalek::ed25519::Error as SignatureError; -#[cfg(any(feature = "std", test))] -use pem::PemError; -use serde::Serialize; -#[cfg(any(feature = "std", test))] -use thiserror::Error; - -#[cfg(any(feature = "std", test))] -use crate::file_utils::{ReadFileError, WriteFileError}; - -/// Cryptographic errors. -#[derive(Clone, Eq, PartialEq, Debug, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum Error { - /// Error resulting from creating or using asymmetric key types. - AsymmetricKey(String), - - /// Error resulting when decoding a type from a hex-encoded representation. - #[serde(with = "serde_helpers::Base16DecodeError")] - #[cfg_attr(feature = "datasize", data_size(skip))] - FromHex(base16::DecodeError), - - /// Error resulting when decoding a type from a base64 representation. - #[serde(with = "serde_helpers::Base64DecodeError")] - #[cfg_attr(feature = "datasize", data_size(skip))] - FromBase64(base64::DecodeError), - - /// Signature error. - SignatureError, - - /// Error trying to manipulate the system key. - System(String), -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - match self { - Error::AsymmetricKey(error_msg) => { - write!(formatter, "asymmetric key error: {}", error_msg) - } - Error::FromHex(error) => { - write!(formatter, "decoding from hex: {}", error) - } - Error::FromBase64(error) => { - write!(formatter, "decoding from base 64: {}", error) - } - Error::SignatureError => { - write!(formatter, "error in signature") - } - Error::System(error_msg) => { - write!(formatter, "invalid operation on system key: {}", error_msg) - } - } - } -} - -impl From for Error { - fn from(error: base16::DecodeError) -> Self { - Error::FromHex(error) - } -} - -impl From for Error { - fn from(_error: SignatureError) -> Self { - Error::SignatureError - } -} - -#[cfg(any(feature = "std", test))] -impl StdError for Error { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - Error::FromHex(error) => Some(error), - Error::FromBase64(error) => Some(error), - Error::AsymmetricKey(_) | Error::SignatureError | Error::System(_) => None, - } - } -} - -/// Cryptographic errors extended with some additional variants. 
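// A minimal illustrative sketch (not from the deleted sources) of the
// `thiserror` pattern the `ErrorExt` enum below relies on: `#[error("...")]`
// supplies the Display implementation and `#[from]` generates the From
// conversion, so `?` lifts the inner error automatically. The `parse` helper,
// its error variants and messages are placeholders for illustration only.
use thiserror::Error;

#[derive(Debug, Error)]
enum ConfigError {
    #[error("could not parse number: {0}")]
    BadNumber(#[from] std::num::ParseIntError),

    #[error("value out of range: {0}")]
    OutOfRange(u64),
}

fn parse(input: &str) -> Result<u64, ConfigError> {
    let value: u64 = input.parse()?; // ParseIntError -> ConfigError via #[from]
    if value > 1_000 {
        return Err(ConfigError::OutOfRange(value));
    }
    Ok(value)
}

fn main() {
    assert!(parse("42").is_ok());
    assert!(matches!(parse("abc"), Err(ConfigError::BadNumber(_))));
    assert!(matches!(parse("9999"), Err(ConfigError::OutOfRange(9999))));
}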
-#[cfg(any(feature = "std", test))] -#[derive(Debug, Error)] -#[non_exhaustive] -pub enum ErrorExt { - /// A basic crypto error. - #[error("crypto error: {0:?}")] - CryptoError(#[from] Error), - - /// Error trying to read a secret key. - #[error("secret key load failed: {0}")] - SecretKeyLoad(ReadFileError), - - /// Error trying to read a public key. - #[error("public key load failed: {0}")] - PublicKeyLoad(ReadFileError), - - /// Error trying to write a secret key. - #[error("secret key save failed: {0}")] - SecretKeySave(WriteFileError), - - /// Error trying to write a public key. - #[error("public key save failed: {0}")] - PublicKeySave(WriteFileError), - - /// Pem format error. - #[error("pem error: {0}")] - FromPem(String), - - /// DER format error. - #[error("der error: {0}")] - FromDer(#[from] derp::Error), - - /// Error in getting random bytes from the system's preferred random number source. - #[error("failed to get random bytes: {0}")] - GetRandomBytes(#[from] getrandom::Error), -} - -#[cfg(any(feature = "std", test))] -impl From for ErrorExt { - fn from(error: PemError) -> Self { - ErrorExt::FromPem(error.to_string()) - } -} - -/// This module allows us to derive `Serialize` for the third party error types which don't -/// themselves derive it. -/// -/// See for more info. -#[allow(clippy::enum_variant_names)] -mod serde_helpers { - use serde::Serialize; - - #[derive(Serialize)] - #[serde(remote = "base16::DecodeError")] - pub(super) enum Base16DecodeError { - InvalidByte { index: usize, byte: u8 }, - InvalidLength { length: usize }, - } - - #[derive(Serialize)] - #[serde(remote = "base64::DecodeError")] - pub(super) enum Base64DecodeError { - InvalidByte(usize, u8), - InvalidLength, - InvalidLastSymbol(usize, u8), - } -} diff --git a/casper_types_ver_2_0/src/deploy_info.rs b/casper_types_ver_2_0/src/deploy_info.rs deleted file mode 100644 index faa51e74..00000000 --- a/casper_types_ver_2_0/src/deploy_info.rs +++ /dev/null @@ -1,174 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes}, - serde_helpers, DeployHash, TransferAddr, URef, U512, -}; - -/// Information relating to the given Deploy. -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct DeployInfo { - /// The relevant Deploy. - #[serde(with = "serde_helpers::deploy_hash_as_array")] - #[cfg_attr( - feature = "json-schema", - schemars(with = "DeployHash", description = "Hex-encoded Deploy hash.") - )] - pub deploy_hash: DeployHash, - /// Transfers performed by the Deploy. - pub transfers: Vec, - /// Account identifier of the creator of the Deploy. - pub from: AccountHash, - /// Source purse used for payment of the Deploy. - pub source: URef, - /// Gas cost of executing the Deploy. - pub gas: U512, -} - -impl DeployInfo { - /// Creates a [`DeployInfo`]. 
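// A minimal illustrative sketch (not from the deleted sources) of the serde
// "remote derive" pattern used by the `serde_helpers` module above for
// `base16::DecodeError` and `base64::DecodeError`: a local mirror type with the
// same shape is annotated with `#[serde(remote = "...")]`, and the field
// holding the foreign type points at it via `#[serde(with = "...")]`.
// `other_crate::Status` and `Report` are hypothetical stand-ins for a third
// party type that does not implement `Serialize` itself.
use serde::Serialize;

mod other_crate {
    // Stands in for a third-party type outside our control.
    pub enum Status {
        Ok,
        Failed { code: u32 },
    }
}

#[derive(Serialize)]
#[serde(remote = "other_crate::Status")]
enum StatusDef {
    Ok,
    Failed { code: u32 },
}

#[derive(Serialize)]
struct Report {
    #[serde(with = "StatusDef")]
    status: other_crate::Status,
}

fn main() {
    let report = Report {
        status: other_crate::Status::Failed { code: 7 },
    };
    println!("{}", serde_json::to_string(&report).expect("serializes"));
}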
- pub fn new( - deploy_hash: DeployHash, - transfers: &[TransferAddr], - from: AccountHash, - source: URef, - gas: U512, - ) -> Self { - let transfers = transfers.to_vec(); - DeployInfo { - deploy_hash, - transfers, - from, - source, - gas, - } - } -} - -impl FromBytes for DeployInfo { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (deploy_hash, rem) = DeployHash::from_bytes(bytes)?; - let (transfers, rem) = Vec::::from_bytes(rem)?; - let (from, rem) = AccountHash::from_bytes(rem)?; - let (source, rem) = URef::from_bytes(rem)?; - let (gas, rem) = U512::from_bytes(rem)?; - Ok(( - DeployInfo { - deploy_hash, - transfers, - from, - source, - gas, - }, - rem, - )) - } -} - -impl ToBytes for DeployInfo { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.deploy_hash.write_bytes(&mut result)?; - self.transfers.write_bytes(&mut result)?; - self.from.write_bytes(&mut result)?; - self.source.write_bytes(&mut result)?; - self.gas.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.deploy_hash.serialized_length() - + self.transfers.serialized_length() - + self.from.serialized_length() - + self.source.serialized_length() - + self.gas.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deploy_hash.write_bytes(writer)?; - self.transfers.write_bytes(writer)?; - self.from.write_bytes(writer)?; - self.source.write_bytes(writer)?; - self.gas.write_bytes(writer)?; - Ok(()) - } -} - -/// Generators for a `Deploy` -#[cfg(any(feature = "testing", feature = "gens", test))] -pub(crate) mod gens { - use alloc::vec::Vec; - - use proptest::{ - array, - collection::{self, SizeRange}, - prelude::{Arbitrary, Strategy}, - }; - - use crate::{ - account::AccountHash, - gens::{u512_arb, uref_arb}, - DeployHash, DeployInfo, TransferAddr, - }; - - pub fn deploy_hash_arb() -> impl Strategy { - array::uniform32(::arbitrary()).prop_map(DeployHash::from_raw) - } - - pub fn transfer_addr_arb() -> impl Strategy { - array::uniform32(::arbitrary()).prop_map(TransferAddr::new) - } - - pub fn transfers_arb(size: impl Into) -> impl Strategy> { - collection::vec(transfer_addr_arb(), size) - } - - pub fn account_hash_arb() -> impl Strategy { - array::uniform32(::arbitrary()).prop_map(AccountHash::new) - } - - /// Creates an arbitrary `Deploy` - pub fn deploy_info_arb() -> impl Strategy { - let transfers_length_range = 0..5; - ( - deploy_hash_arb(), - transfers_arb(transfers_length_range), - account_hash_arb(), - uref_arb(), - u512_arb(), - ) - .prop_map(|(deploy_hash, transfers, from, source, gas)| DeployInfo { - deploy_hash, - transfers, - from, - source, - gas, - }) - } -} - -#[cfg(test)] -mod tests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::gens; - - proptest! { - #[test] - fn test_serialization_roundtrip(deploy_info in gens::deploy_info_arb()) { - bytesrepr::test_serialization_roundtrip(&deploy_info) - } - } -} diff --git a/casper_types_ver_2_0/src/digest.rs b/casper_types_ver_2_0/src/digest.rs deleted file mode 100644 index 31a5d77e..00000000 --- a/casper_types_ver_2_0/src/digest.rs +++ /dev/null @@ -1,730 +0,0 @@ -//! Contains digest and merkle chunking used throughout the system. 
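// A minimal, self-contained sketch (not from the deleted sources) of the
// `to_bytes`/`from_bytes` convention followed by the `DeployInfo` impls above:
// writing serialises each field in declaration order, and reading consumes a
// prefix and returns the unread remainder so the next field can continue. The
// `Pair` type and the little-endian u32 encoding are illustrative stand-ins,
// not the real casper `bytesrepr` formats.
#[derive(Debug, PartialEq)]
struct Pair {
    from: u32,
    gas: u32,
}

fn read_u32(bytes: &[u8]) -> Option<(u32, &[u8])> {
    if bytes.len() < 4 {
        return None;
    }
    let (head, rest) = bytes.split_at(4);
    let mut buf = [0u8; 4];
    buf.copy_from_slice(head);
    Some((u32::from_le_bytes(buf), rest))
}

impl Pair {
    fn to_bytes(&self) -> Vec<u8> {
        let mut out = Vec::new();
        out.extend_from_slice(&self.from.to_le_bytes());
        out.extend_from_slice(&self.gas.to_le_bytes());
        out
    }

    fn from_bytes(bytes: &[u8]) -> Option<(Self, &[u8])> {
        let (from, rest) = read_u32(bytes)?;
        let (gas, rest) = read_u32(rest)?;
        Some((Pair { from, gas }, rest))
    }
}

fn main() {
    let pair = Pair { from: 7, gas: 100 };
    let bytes = pair.to_bytes();
    let (decoded, rest) = Pair::from_bytes(&bytes).expect("well-formed bytes");
    assert_eq!(decoded, pair);
    assert!(rest.is_empty()); // every byte consumed, mirroring the roundtrip tests
}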
- -mod chunk_with_proof; -mod error; -mod indexed_merkle_proof; - -use alloc::{collections::BTreeMap, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::{TryFrom, TryInto}, - fmt::{self, Debug, Display, Formatter, LowerHex, UpperHex}, -}; - -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use hex_fmt::HexFmt; -use itertools::Itertools; -#[cfg(feature = "once_cell")] -use once_cell::sync::OnceCell; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, CLType, CLTyped, -}; -pub use chunk_with_proof::ChunkWithProof; -pub use error::{ - ChunkWithProofVerificationError, Error as DigestError, MerkleConstructionError, - MerkleVerificationError, -}; -pub use indexed_merkle_proof::IndexedMerkleProof; - -/// The output of the hash function. -#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Hex-encoded hash digest.") -)] -pub struct Digest( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] - pub(super) [u8; Digest::LENGTH], -); - -const CHUNK_DATA_ZEROED: &[u8] = &[0u8; ChunkWithProof::CHUNK_SIZE_BYTES]; - -impl Digest { - /// The number of bytes in a `Digest`. - pub const LENGTH: usize = 32; - - /// Sentinel hash to be used for hashing options in the case of `None`. - pub const SENTINEL_NONE: Digest = Digest([0u8; Digest::LENGTH]); - /// Sentinel hash to be used by `hash_slice_rfold`. Terminates the fold. - pub const SENTINEL_RFOLD: Digest = Digest([1u8; Digest::LENGTH]); - /// Sentinel hash to be used by `hash_merkle_tree` in the case of an empty list. - pub const SENTINEL_MERKLE_TREE: Digest = Digest([2u8; Digest::LENGTH]); - - /// Creates a 32-byte BLAKE2b hash digest from a given a piece of data. - pub fn hash>(data: T) -> Digest { - Self::blake2b_hash(data) - } - - /// Creates a 32-byte BLAKE2b hash digest from a given a piece of data - pub(crate) fn blake2b_hash>(data: T) -> Digest { - let mut ret = [0u8; Digest::LENGTH]; - // NOTE: Safe to unwrap here because our digest length is constant and valid - let mut hasher = VarBlake2b::new(Digest::LENGTH).unwrap(); - hasher.update(data); - hasher.finalize_variable(|hash| ret.clone_from_slice(hash)); - Digest(ret) - } - - /// Hashes a pair of byte slices. - pub fn hash_pair, U: AsRef<[u8]>>(data1: T, data2: U) -> Digest { - let mut result = [0; Digest::LENGTH]; - let mut hasher = VarBlake2b::new(Digest::LENGTH).unwrap(); - hasher.update(data1); - hasher.update(data2); - hasher.finalize_variable(|slice| { - result.copy_from_slice(slice); - }); - Digest(result) - } - - /// Hashes a raw Merkle root and leaf count to firm the final Merkle hash. - /// - /// To avoid pre-image attacks, the final hash that is based upon the number of leaves in the - /// Merkle tree and the root hash is prepended with a padding to ensure it is longer than the - /// actual chunk size. 
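// A minimal illustrative sketch (not from the deleted sources) of the 32-byte
// BLAKE2b hashing performed by `blake2b_hash` and `hash_pair` above, using the
// same `blake2` crate API the deleted code imports (`VarBlake2b`, `Update`,
// `VariableOutput`). Feeding two inputs through `update` is equivalent to
// hashing their concatenation, which is all "hashing a pair" means here.
use blake2::{
    digest::{Update, VariableOutput},
    VarBlake2b,
};

fn blake2b_32(parts: &[&[u8]]) -> [u8; 32] {
    let mut out = [0u8; 32];
    let mut hasher = VarBlake2b::new(32).expect("32 is a valid BLAKE2b output length");
    for &part in parts {
        hasher.update(part);
    }
    hasher.finalize_variable(|digest| out.copy_from_slice(digest));
    out
}

fn main() {
    let pair = blake2b_32(&[&b"left"[..], &b"right"[..]]);
    let concatenated = blake2b_32(&[&b"leftright"[..]]);
    assert_eq!(pair, concatenated); // a pair hash is the hash of the concatenation
}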
- /// - /// Without this feature, an attacker could construct an item that is only a few bytes long but - /// hashes to the same value as a much longer, chunked item by hashing `(len || root hash of - /// longer item's Merkle tree root)`. - /// - /// This function computes the correct final hash by ensuring the hasher used has been - /// initialized with padding before. - /// - /// With `once_cell` feature enabled (generally done by enabling `std` feature), for efficiency - /// reasons it uses a memoized hasher state computed on first run and cloned afterwards. - fn hash_merkle_root(leaf_count: u64, root: Digest) -> Digest { - #[cfg(feature = "once_cell")] - static PAIR_PREFIX_HASHER: OnceCell = OnceCell::new(); - - let mut result = [0; Digest::LENGTH]; - let get_hasher = || { - let mut hasher = VarBlake2b::new(Digest::LENGTH).unwrap(); - hasher.update(CHUNK_DATA_ZEROED); - hasher - }; - #[cfg(feature = "once_cell")] - let mut hasher = PAIR_PREFIX_HASHER.get_or_init(get_hasher).clone(); - #[cfg(not(feature = "once_cell"))] - let mut hasher = get_hasher(); - - hasher.update(leaf_count.to_le_bytes()); - hasher.update(root); - hasher.finalize_variable(|slice| { - result.copy_from_slice(slice); - }); - Digest(result) - } - - /// Returns the underlying BLAKE2b hash bytes - pub fn value(&self) -> [u8; Digest::LENGTH] { - self.0 - } - - /// Converts the underlying BLAKE2b hash digest array to a `Vec` - pub fn into_vec(self) -> Vec { - self.0.to_vec() - } - - /// Hashes an `impl IntoIterator` of [`Digest`]s into a single [`Digest`] by - /// constructing a [Merkle tree][1]. Reduces pairs of elements in the collection by repeatedly - /// calling [Digest::hash_pair]. - /// - /// The pattern of hashing is as follows. It is akin to [graph reduction][2]: - /// - /// ```text - /// 1 2 4 5 8 9 - /// │ │ │ │ │ │ - /// └─3 └─6 └─10 - /// │ │ │ - /// └───7 │ - /// │ │ - /// └───11 - /// ``` - /// - /// Finally hashes the number of elements with the resulting hash. In the example above the - /// final output would be `hash_pair(6_u64.to_le_bytes(), l)`. - /// - /// Returns [`Digest::SENTINEL_MERKLE_TREE`] when the input is empty. - /// - /// [1]: https://en.wikipedia.org/wiki/Merkle_tree - /// [2]: https://en.wikipedia.org/wiki/Graph_reduction - pub fn hash_merkle_tree(leaves: I) -> Digest - where - I: IntoIterator, - I::IntoIter: ExactSizeIterator, - { - let leaves = leaves.into_iter(); - let leaf_count = leaves.len() as u64; - - leaves.tree_fold1(Digest::hash_pair).map_or_else( - || Digest::SENTINEL_MERKLE_TREE, - |raw_root| Digest::hash_merkle_root(leaf_count, raw_root), - ) - } - - /// Hashes a `BTreeMap`. - pub fn hash_btree_map(btree_map: &BTreeMap) -> Result - where - K: ToBytes, - V: ToBytes, - { - let mut kv_hashes: Vec = Vec::with_capacity(btree_map.len()); - for (key, value) in btree_map.iter() { - kv_hashes.push(Digest::hash_pair( - Digest::hash(key.to_bytes()?), - Digest::hash(value.to_bytes()?), - )) - } - Ok(Self::hash_merkle_tree(kv_hashes)) - } - - /// Hashes a `&[Digest]` using a [right fold][1]. - /// - /// This pattern of hashing is as follows: - /// - /// ```text - /// hash_pair(a, &hash_pair(b, &hash_pair(c, &SENTINEL_RFOLD))) - /// ``` - /// - /// Unlike Merkle trees, this is suited to hashing heterogeneous lists we may wish to extend in - /// the future (ie, hashes of data structures that may undergo revision). - /// - /// Returns [`Digest::SENTINEL_RFOLD`] when given an empty slice as input. 
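// A minimal illustrative sketch (not from the deleted sources) of the pairing
// order behind `hash_merkle_tree`, using the same `itertools::Itertools::tree_fold1`
// combinator the deleted code uses, but on strings instead of digests so the
// reduction shape from the doc-comment diagram above is visible: (1,2), (4,5)
// and (8,9) are paired first, the first two pairs combine next, and the last
// pair joins at the root. The real code then hashes the leaf count together
// with that root.
use itertools::Itertools;

fn main() {
    let leaves = ["1", "2", "4", "5", "8", "9"].map(String::from);
    let root = leaves
        .into_iter()
        .tree_fold1(|left, right| format!("({left},{right})"))
        .expect("non-empty input");
    assert_eq!(root, "(((1,2),(4,5)),(8,9))");
}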
- /// - /// [1]: https://en.wikipedia.org/wiki/Fold_(higher-order_function)#Linear_folds - pub fn hash_slice_rfold(slice: &[Digest]) -> Digest { - Self::hash_slice_with_proof(slice, Self::SENTINEL_RFOLD) - } - - /// Hashes a `&[Digest]` using a [right fold][1]. Uses `proof` as a Merkle proof for the - /// missing tail of the slice. - /// - /// [1]: https://en.wikipedia.org/wiki/Fold_(higher-order_function)#Linear_folds - pub fn hash_slice_with_proof(slice: &[Digest], proof: Digest) -> Digest { - slice - .iter() - .rfold(proof, |prev, next| Digest::hash_pair(next, prev)) - } - - /// Returns a `Digest` parsed from a hex-encoded `Digest`. - pub fn from_hex>(hex_input: T) -> Result { - let bytes = checksummed_hex::decode(&hex_input).map_err(DigestError::Base16DecodeError)?; - let slice: [u8; Self::LENGTH] = bytes - .try_into() - .map_err(|_| DigestError::IncorrectDigestLength(hex_input.as_ref().len()))?; - Ok(Digest(slice)) - } - - /// Hash data into chunks if necessary. - pub fn hash_into_chunks_if_necessary(bytes: &[u8]) -> Digest { - if bytes.len() <= ChunkWithProof::CHUNK_SIZE_BYTES { - Digest::blake2b_hash(bytes) - } else { - Digest::hash_merkle_tree( - bytes - .chunks(ChunkWithProof::CHUNK_SIZE_BYTES) - .map(Digest::blake2b_hash), - ) - } - } - - /// Returns a new `Digest` directly initialized with the provided bytes; no hashing is done. - /// - /// This is equivalent to `Deploy::from`, but is a const function. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - Digest(raw_digest) - } - - /// Returns a random `Digest`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - Digest(rng.gen()) - } -} - -impl CLTyped for Digest { - fn cl_type() -> CLType { - CLType::ByteArray(Digest::LENGTH as u32) - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Digest { - Digest(rng.gen()) - } -} - -impl LowerHex for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - let hex_string = base16::encode_lower(&self.value()); - if f.alternate() { - write!(f, "0x{}", hex_string) - } else { - write!(f, "{}", hex_string) - } - } -} - -impl UpperHex for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - let hex_string = base16::encode_upper(&self.value()); - if f.alternate() { - write!(f, "0x{}", hex_string) - } else { - write!(f, "{}", hex_string) - } - } -} - -impl Display for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{:10}", HexFmt(&self.0)) - } -} - -impl Debug for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl From<[u8; Digest::LENGTH]> for Digest { - fn from(arr: [u8; Digest::LENGTH]) -> Self { - Digest(arr) - } -} - -impl<'a> TryFrom<&'a [u8]> for Digest { - type Error = TryFromSliceError; - - fn try_from(slice: &[u8]) -> Result { - <[u8; Digest::LENGTH]>::try_from(slice).map(Digest) - } -} - -impl AsRef<[u8]> for Digest { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl From for [u8; Digest::LENGTH] { - fn from(hash: Digest) -> Self { - hash.0 - } -} - -impl ToBytes for Digest { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - 
Ok(()) - } -} - -impl FromBytes for Digest { - #[inline(always)] - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - FromBytes::from_bytes(bytes).map(|(arr, rem)| (Digest(arr), rem)) - } -} - -impl Serialize for Digest { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - base16::encode_lower(&self.0).serialize(serializer) - } else { - // This is to keep backwards compatibility with how HexForm encodes - // byte arrays. HexForm treats this like a slice. - self.0[..].serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for Digest { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let bytes = - checksummed_hex::decode(hex_string.as_bytes()).map_err(SerdeError::custom)?; - let data = - <[u8; Digest::LENGTH]>::try_from(bytes.as_ref()).map_err(SerdeError::custom)?; - Ok(Digest::from(data)) - } else { - let data = >::deserialize(deserializer)?; - Digest::try_from(data.as_slice()).map_err(D::Error::custom) - } - } -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeMap, iter}; - - use proptest_attr_macro::proptest; - - use super::Digest; - - use crate::{ - bytesrepr::{self, ToBytes}, - ChunkWithProof, - }; - - #[proptest] - fn bytesrepr_roundtrip(hash: [u8; Digest::LENGTH]) { - let digest = Digest(hash); - bytesrepr::test_serialization_roundtrip(&digest); - } - - #[proptest] - fn serde_roundtrip(hash: [u8; Digest::LENGTH]) { - let preser_digest = Digest(hash); - let serialized = serde_json::to_string(&preser_digest).unwrap(); - let deser_digest: Digest = serde_json::from_str(&serialized).unwrap(); - assert_eq!(preser_digest, deser_digest); - } - - #[test] - fn serde_custom_serialization() { - let serialized = serde_json::to_string(&Digest::SENTINEL_RFOLD).unwrap(); - let expected = format!("\"{:?}\"", Digest::SENTINEL_RFOLD); - assert_eq!(expected, serialized); - } - - #[test] - fn hash_known() { - // Data of length less or equal to [ChunkWithProof::CHUNK_SIZE_BYTES] - // are hashed using Blake2B algorithm. - // Larger data are chunked and Merkle tree hash is calculated. - // - // Please note that [ChunkWithProof::CHUNK_SIZE_BYTES] is `test` configuration - // is smaller than in production, to allow testing with more chunks - // with still reasonable time and memory consumption. 
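// A minimal illustrative sketch (not from the deleted sources) of the size
// threshold described in the comment above and implemented earlier in this
// file by `hash_into_chunks_if_necessary`: with the 10-byte test chunk size,
// a 10-byte input is hashed directly while an 11-byte input is split into
// chunks whose per-chunk hashes feed a Merkle tree. The helper below only
// classifies inputs; it does no real hashing.
const TEST_CHUNK_SIZE_BYTES: usize = 10;

fn hashing_scheme(data: &[u8]) -> String {
    if data.len() <= TEST_CHUNK_SIZE_BYTES {
        "single BLAKE2b hash over the raw bytes".to_string()
    } else {
        let chunks = data.chunks(TEST_CHUNK_SIZE_BYTES).count();
        format!("Merkle tree over {chunks} per-chunk BLAKE2b hashes")
    }
}

fn main() {
    assert_eq!(
        hashing_scheme(b"0123456789"),
        "single BLAKE2b hash over the raw bytes"
    );
    assert_eq!(
        hashing_scheme(b"01234567890"),
        "Merkle tree over 2 per-chunk BLAKE2b hashes"
    );
}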
- // - // See: [Digest::hash] - let inputs_and_digests = [ - ( - "", - "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8", - ), - ( - "abc", - "bddd813c634239723171ef3fee98579b94964e3bb1cb3e427262c8c068d52319", - ), - ( - "0123456789", - "7b6cb8d374484e221785288b035dc53fc9ddf000607f473fc2a3258d89a70398", - ), - ( - "01234567890", - "3d199478c18b7fe3ca1f4f2a9b3e07f708ff66ed52eb345db258abe8a812ed5c", - ), - ( - "The quick brown fox jumps over the lazy dog", - "01718cec35cd3d796dd00020e0bfecb473ad23457d063b75eff29c0ffa2e58a9", - ), - ]; - for (known_input, expected_digest) in &inputs_and_digests { - let known_input: &[u8] = known_input.as_ref(); - assert_eq!(*expected_digest, format!("{:?}", Digest::hash(known_input))); - } - } - - #[test] - fn from_valid_hex_should_succeed() { - for char in "abcdefABCDEF0123456789".chars() { - let input: String = iter::repeat(char).take(64).collect(); - assert!(Digest::from_hex(input).is_ok()); - } - } - - #[test] - fn from_hex_invalid_length_should_fail() { - for len in &[2_usize, 62, 63, 65, 66] { - let input: String = "f".repeat(*len); - assert!(Digest::from_hex(input).is_err()); - } - } - - #[test] - fn from_hex_invalid_char_should_fail() { - for char in "g %-".chars() { - let input: String = iter::repeat('f').take(63).chain(iter::once(char)).collect(); - assert!(Digest::from_hex(input).is_err()); - } - } - - #[test] - fn should_display_digest_in_hex() { - let hash = Digest([0u8; 32]); - let hash_hex = format!("{:?}", hash); - assert_eq!( - hash_hex, - "0000000000000000000000000000000000000000000000000000000000000000" - ); - } - - #[test] - fn should_print_digest_lower_hex() { - let hash = Digest([10u8; 32]); - let hash_lower_hex = format!("{:x}", hash); - assert_eq!( - hash_lower_hex, - "0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a" - ) - } - - #[test] - fn should_print_digest_upper_hex() { - let hash = Digest([10u8; 32]); - let hash_upper_hex = format!("{:X}", hash); - assert_eq!( - hash_upper_hex, - "0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A" - ) - } - - #[test] - fn alternate_should_prepend_0x() { - let hash = Digest([0u8; 32]); - let hash_hex_alt = format!("{:#x}", hash); - assert_eq!( - hash_hex_alt, - "0x0000000000000000000000000000000000000000000000000000000000000000" - ) - } - - #[test] - fn test_hash_pair() { - let hash1 = Digest([1u8; 32]); - let hash2 = Digest([2u8; 32]); - - let hash = Digest::hash_pair(hash1, hash2); - let hash_lower_hex = format!("{:x}", hash); - - assert_eq!( - hash_lower_hex, - "30b600fb1f0cc0b3f0fc28cdcb7389405a6659be81c7d5c5905725aa3a5119ce" - ); - } - - #[test] - fn test_hash_rfold() { - let hashes = [ - Digest([1u8; 32]), - Digest([2u8; 32]), - Digest([3u8; 32]), - Digest([4u8; 32]), - Digest([5u8; 32]), - ]; - - let hash = Digest::hash_slice_rfold(&hashes[..]); - let hash_lower_hex = format!("{:x}", hash); - - assert_eq!( - hash_lower_hex, - "e137f4eb94d2387065454eecfe2cdb5584e3dbd5f1ca07fc511fffd13d234e8e" - ); - - let proof = Digest::hash_slice_rfold(&hashes[2..]); - let hash_proof = Digest::hash_slice_with_proof(&hashes[..2], proof); - - assert_eq!(hash, hash_proof); - } - - #[test] - fn test_hash_merkle_odd() { - let hashes = [ - Digest([1u8; 32]), - Digest([2u8; 32]), - Digest([3u8; 32]), - Digest([4u8; 32]), - Digest([5u8; 32]), - ]; - - let hash = Digest::hash_merkle_tree(hashes); - let hash_lower_hex = format!("{:x}", hash); - - assert_eq!( - hash_lower_hex, - "775cec8133b97b0e8d4e97659025d5bac4ed7c8927d1bd99cf62114df57f3e74" - ); - } - - #[test] - 
fn test_hash_merkle_even() { - let hashes = [ - Digest([1u8; 32]), - Digest([2u8; 32]), - Digest([3u8; 32]), - Digest([4u8; 32]), - Digest([5u8; 32]), - Digest([6u8; 32]), - ]; - - let hash = Digest::hash_merkle_tree(hashes); - let hash_lower_hex = format!("{:x}", hash); - - assert_eq!( - hash_lower_hex, - "4bd50b08a8366b28c35bc831b95d147123bad01c29ffbf854b659c4b3ea4086c" - ); - } - - #[test] - fn test_hash_btreemap() { - let mut map = BTreeMap::new(); - let _ = map.insert(Digest([1u8; 32]), Digest([2u8; 32])); - let _ = map.insert(Digest([3u8; 32]), Digest([4u8; 32])); - let _ = map.insert(Digest([5u8; 32]), Digest([6u8; 32])); - let _ = map.insert(Digest([7u8; 32]), Digest([8u8; 32])); - let _ = map.insert(Digest([9u8; 32]), Digest([10u8; 32])); - - let hash = Digest::hash_btree_map(&map).unwrap(); - let hash_lower_hex = format!("{:x}", hash); - - assert_eq!( - hash_lower_hex, - "fd1214a627473ffc6d6cc97e7012e6344d74abbf987b48cde5d0642049a0db98" - ); - } - - #[test] - fn digest_deserialize_regression() { - let input = Digest([0; 32]); - let serialized = bincode::serialize(&input).expect("failed to serialize."); - - let expected = vec![ - 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - ]; - - assert_eq!(expected, serialized); - } - - #[test] - fn should_assert_simple_digest_serialization_format() { - let digest_bytes = [0; 32]; - - assert_eq!( - Digest(digest_bytes).to_bytes().unwrap(), - digest_bytes.to_vec() - ); - } - - #[test] - fn merkle_roots_are_preimage_resistent() { - // Input data is two chunks long. - // - // The resulting tree will look like this: - // - // 1..0 a..j - // │ │ - // └─────── R - // - // The Merkle root is thus: R = h( h(1..0) || h(a..j) ) - // - // h(1..0) = 807f1ba73147c3a96c2d63b38dd5a5f514f66290a1436bb9821e9f2a72eff263 - // h(a..j) = 499e1cdb476523fedafc9d9db31125e2744f271578ea95b16ab4bd1905f05fea - // R=h(h(1..0)||h(a..j)) = 1319394a98d0cb194f960e3748baeb2045a9ec28aa51e0d42011be43f4a91f5f - // h(2u64le || R) = c31f0bb6ef569354d1a26c3a51f1ad4b6d87cef7f73a290ab6be8db6a9c7d4ee - // - // The final step is to hash h(2u64le || R), which is the length as little endian - // concatenated with the root. - - // Constants used here assume a chunk size of 10 bytes. - assert_eq!(ChunkWithProof::CHUNK_SIZE_BYTES, 10); - - let long_data = b"1234567890abcdefghij"; - assert_eq!(long_data.len(), ChunkWithProof::CHUNK_SIZE_BYTES * 2); - - // The `long_data_hash` is constructed manually here, as `Digest::hash` still had - // deactivated chunking code at the time this test was written. - let long_data_hash = Digest::hash_merkle_tree( - long_data - .as_ref() - .chunks(ChunkWithProof::CHUNK_SIZE_BYTES) - .map(Digest::blake2b_hash), - ); - - // The concatenation of `2u64` in little endian + the Merkle root hash `R`. Note that this - // is a valid hashable object on its own. - let maybe_colliding_short_data = [ - 2, 0, 0, 0, 0, 0, 0, 0, 19, 25, 57, 74, 152, 208, 203, 25, 79, 150, 14, 55, 72, 186, - 235, 32, 69, 169, 236, 40, 170, 81, 224, 212, 32, 17, 190, 67, 244, 169, 31, 95, - ]; - - // Use `blake2b_hash` to work around the issue of the chunk size being shorter than the - // digest length. - let short_data_hash = Digest::blake2b_hash(maybe_colliding_short_data); - - // Ensure there is no collision. You can verify this test is correct by temporarily changing - // the `Digest::hash_merkle_tree` function to use the unpadded `hash_pair` function, instead - // of `hash_merkle_root`. 
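// A minimal illustrative sketch (not from the deleted sources) of how the
// padded final hash input discussed in the test above is assembled: a full
// chunk of zeros, then the little-endian leaf count, then the raw Merkle root.
// Because this preimage is always longer than one chunk, it cannot collide
// with a short unchunked value of the form (count || root). The 10-byte chunk
// size matches the test configuration; the 0xAB root is a placeholder.
const CHUNK_SIZE_BYTES: usize = 10;

fn merkle_root_preimage(leaf_count: u64, raw_root: &[u8; 32]) -> Vec<u8> {
    let mut preimage = vec![0u8; CHUNK_SIZE_BYTES]; // the zero-chunk padding
    preimage.extend_from_slice(&leaf_count.to_le_bytes());
    preimage.extend_from_slice(raw_root);
    preimage
}

fn main() {
    let preimage = merkle_root_preimage(2, &[0xAB; 32]);
    assert_eq!(preimage.len(), CHUNK_SIZE_BYTES + 8 + 32);
    assert_eq!(&preimage[..CHUNK_SIZE_BYTES], &[0u8; CHUNK_SIZE_BYTES][..]);
}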
- assert_ne!(long_data_hash, short_data_hash); - - // The expected input for the root hash is the colliding data, but prefixed with a full - // chunk of zeros. - let expected_final_hash_input = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 19, 25, 57, 74, 152, 208, 203, - 25, 79, 150, 14, 55, 72, 186, 235, 32, 69, 169, 236, 40, 170, 81, 224, 212, 32, 17, - 190, 67, 244, 169, 31, 95, - ]; - assert_eq!( - Digest::blake2b_hash(expected_final_hash_input), - long_data_hash - ); - - // Another way to specify this sanity check is to say that the short and long data should - // hash differently. - // - // Note: This condition is true at the time of writing this test, where chunk hashing is - // disabled. It should still hold true once enabled. - assert_ne!( - Digest::hash(maybe_colliding_short_data), - Digest::hash(long_data) - ); - - // In a similar manner, the internal padded data should also not hash equal to either, as it - // should be hashed using the chunking function. - assert_ne!( - Digest::hash(maybe_colliding_short_data), - Digest::hash(expected_final_hash_input) - ); - assert_ne!( - Digest::hash(long_data), - Digest::hash(expected_final_hash_input) - ); - } -} diff --git a/casper_types_ver_2_0/src/digest/chunk_with_proof.rs b/casper_types_ver_2_0/src/digest/chunk_with_proof.rs deleted file mode 100644 index 404e74b3..00000000 --- a/casper_types_ver_2_0/src/digest/chunk_with_proof.rs +++ /dev/null @@ -1,335 +0,0 @@ -//! Chunks with Merkle proofs. - -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::{ChunkWithProofVerificationError, Digest, IndexedMerkleProof, MerkleConstructionError}; -use crate::bytesrepr::{self, Bytes, FromBytes, ToBytes}; - -/// Represents a chunk of data with attached proof. -#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct ChunkWithProof { - proof: IndexedMerkleProof, - chunk: Bytes, -} - -impl ToBytes for ChunkWithProof { - fn write_bytes(&self, buf: &mut Vec) -> Result<(), bytesrepr::Error> { - buf.append(&mut self.proof.to_bytes()?); - buf.append(&mut self.chunk.to_bytes()?); - - Ok(()) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.proof.serialized_length() + self.chunk.serialized_length() - } -} - -impl FromBytes for ChunkWithProof { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (proof, remainder) = FromBytes::from_bytes(bytes)?; - let (chunk, remainder) = FromBytes::from_bytes(remainder)?; - - Ok((ChunkWithProof { proof, chunk }, remainder)) - } -} - -impl ChunkWithProof { - #[cfg(test)] - /// 10 bytes for testing purposes. - pub const CHUNK_SIZE_BYTES: usize = 10; - - #[cfg(not(test))] - /// 8 MiB - pub const CHUNK_SIZE_BYTES: usize = 8 * 1024 * 1024; - - /// Constructs the [`ChunkWithProof`] that contains the chunk of data with the appropriate index - /// and the cryptographic proof. - /// - /// Empty data is always represented as single, empty chunk and not as zero chunks. 
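// A minimal illustrative sketch (not from the deleted sources) of the edge
// case called out in the `ChunkWithProof::new` doc comment above: `[u8]::chunks`
// yields zero chunks for an empty slice, so code that wants "empty data is a
// single, empty chunk" has to special-case it. The chunk size and helper are
// placeholders, not the real constructor.
const CHUNK_SIZE_BYTES: usize = 10;

fn chunk_count(data: &[u8]) -> usize {
    if data.is_empty() {
        1 // represent empty data as a single, empty chunk
    } else {
        data.chunks(CHUNK_SIZE_BYTES).count()
    }
}

fn main() {
    assert_eq!([0u8; 0].chunks(CHUNK_SIZE_BYTES).count(), 0); // `chunks` alone yields nothing
    assert_eq!(chunk_count(&[]), 1);
    assert_eq!(chunk_count(&[0u8; 10]), 1);
    assert_eq!(chunk_count(&[0u8; 11]), 2);
}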
- pub fn new(data: &[u8], index: u64) -> Result { - Ok(if data.is_empty() { - ChunkWithProof { - proof: IndexedMerkleProof::new([Digest::blake2b_hash([])], index)?, - chunk: Bytes::new(), - } - } else { - ChunkWithProof { - proof: IndexedMerkleProof::new( - data.chunks(Self::CHUNK_SIZE_BYTES) - .map(Digest::blake2b_hash), - index, - )?, - chunk: Bytes::from( - data.chunks(Self::CHUNK_SIZE_BYTES) - .nth(index as usize) - .ok_or_else(|| MerkleConstructionError::IndexOutOfBounds { - count: data.chunks(Self::CHUNK_SIZE_BYTES).len() as u64, - index, - })?, - ), - } - }) - } - - /// Get a reference to the `ChunkWithProof`'s chunk. - pub fn chunk(&self) -> &[u8] { - self.chunk.as_slice() - } - - /// Convert a chunk with proof into the underlying chunk. - pub fn into_chunk(self) -> Bytes { - self.chunk - } - - /// Returns the `IndexedMerkleProof`. - pub fn proof(&self) -> &IndexedMerkleProof { - &self.proof - } - - /// Verify the integrity of this chunk with indexed Merkle proof. - pub fn verify(&self) -> Result<(), ChunkWithProofVerificationError> { - self.proof().verify()?; - let first_digest_in_indexed_merkle_proof = - self.proof().merkle_proof().first().ok_or_else(|| { - ChunkWithProofVerificationError::ChunkWithProofHasEmptyMerkleProof { - chunk_with_proof: self.clone(), - } - })?; - let hash_of_chunk = Digest::hash(self.chunk()); - if *first_digest_in_indexed_merkle_proof != hash_of_chunk { - return Err( - ChunkWithProofVerificationError::FirstDigestInMerkleProofDidNotMatchHashOfChunk { - first_digest_in_indexed_merkle_proof: *first_digest_in_indexed_merkle_proof, - hash_of_chunk, - }, - ); - } - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use std::convert::TryInto; - - use proptest::{ - arbitrary::Arbitrary, - strategy::{BoxedStrategy, Strategy}, - }; - use proptest_attr_macro::proptest; - use rand::Rng; - - use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - ChunkWithProof, Digest, MerkleConstructionError, - }; - - fn prepare_bytes(length: usize) -> Vec { - let mut rng = rand::thread_rng(); - - (0..length).map(|_| rng.gen()).collect() - } - - fn random_chunk_with_proof() -> ChunkWithProof { - let mut rng = rand::thread_rng(); - let data: Vec = prepare_bytes(rng.gen_range(1..1024)); - let index = rng.gen_range(0..data.chunks(ChunkWithProof::CHUNK_SIZE_BYTES).len() as u64); - - ChunkWithProof::new(&data, index).unwrap() - } - - impl ChunkWithProof { - fn replace_first_proof(self) -> Self { - let mut rng = rand::thread_rng(); - let ChunkWithProof { mut proof, chunk } = self; - - // Keep the same number of proofs, but replace the first one with some random hash - let mut merkle_proof: Vec<_> = proof.merkle_proof().to_vec(); - merkle_proof.pop(); - merkle_proof.insert(0, Digest::hash(rng.gen::().to_string())); - proof.inject_merkle_proof(merkle_proof); - - ChunkWithProof { proof, chunk } - } - } - - #[derive(Debug)] - pub struct TestDataSize(usize); - impl Arbitrary for TestDataSize { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { - (0usize..32usize) - .prop_map(|chunk_count| { - TestDataSize(chunk_count * ChunkWithProof::CHUNK_SIZE_BYTES) - }) - .boxed() - } - } - - #[derive(Debug)] - pub struct TestDataSizeAtLeastTwoChunks(usize); - impl Arbitrary for TestDataSizeAtLeastTwoChunks { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { - (2usize..32usize) - .prop_map(|chunk_count| { - TestDataSizeAtLeastTwoChunks(chunk_count * 
ChunkWithProof::CHUNK_SIZE_BYTES) - }) - .boxed() - } - } - - #[proptest] - fn generates_valid_proof(test_data: TestDataSize) { - for data in [prepare_bytes(test_data.0), vec![0u8; test_data.0]] { - let number_of_chunks: u64 = data - .chunks(ChunkWithProof::CHUNK_SIZE_BYTES) - .len() - .try_into() - .unwrap(); - - assert!((0..number_of_chunks) - .map(|chunk_index| { ChunkWithProof::new(data.as_slice(), chunk_index).unwrap() }) - .all(|chunk_with_proof| chunk_with_proof.verify().is_ok())); - } - } - - #[proptest] - fn validate_chunks_against_hash_merkle_tree(test_data: TestDataSizeAtLeastTwoChunks) { - // This test requires at least two chunks - assert!(test_data.0 >= ChunkWithProof::CHUNK_SIZE_BYTES * 2); - - for data in [prepare_bytes(test_data.0), vec![0u8; test_data.0]] { - let expected_root = Digest::hash_merkle_tree( - data.chunks(ChunkWithProof::CHUNK_SIZE_BYTES) - .map(Digest::hash), - ); - - // Calculate proof with `ChunkWithProof` - let ChunkWithProof { - proof: proof_0, - chunk: _, - } = ChunkWithProof::new(data.as_slice(), 0).unwrap(); - let ChunkWithProof { - proof: proof_1, - chunk: _, - } = ChunkWithProof::new(data.as_slice(), 1).unwrap(); - - assert_eq!(proof_0.root_hash(), expected_root); - assert_eq!(proof_1.root_hash(), expected_root); - } - } - - #[proptest] - fn verifies_chunk_with_proofs(test_data: TestDataSize) { - for data in [prepare_bytes(test_data.0), vec![0u8; test_data.0]] { - let chunk_with_proof = ChunkWithProof::new(data.as_slice(), 0).unwrap(); - assert!(chunk_with_proof.verify().is_ok()); - - let chunk_with_incorrect_proof = chunk_with_proof.replace_first_proof(); - assert!(chunk_with_incorrect_proof.verify().is_err()); - } - } - - #[proptest] - fn serde_deserialization_of_malformed_chunk_should_work(test_data: TestDataSize) { - for data in [prepare_bytes(test_data.0), vec![0u8; test_data.0]] { - let chunk_with_proof = ChunkWithProof::new(data.as_slice(), 0).unwrap(); - - let json = serde_json::to_string(&chunk_with_proof).unwrap(); - assert_eq!( - chunk_with_proof, - serde_json::from_str::(&json) - .expect("should deserialize correctly") - ); - - let chunk_with_incorrect_proof = chunk_with_proof.replace_first_proof(); - let json = serde_json::to_string(&chunk_with_incorrect_proof).unwrap(); - serde_json::from_str::(&json).expect("should deserialize correctly"); - } - } - - #[proptest] - fn bytesrepr_deserialization_of_malformed_chunk_should_work(test_data: TestDataSize) { - for data in [prepare_bytes(test_data.0), vec![0u8; test_data.0]] { - let chunk_with_proof = ChunkWithProof::new(data.as_slice(), 0).unwrap(); - - let bytes = chunk_with_proof - .to_bytes() - .expect("should serialize correctly"); - - let (deserialized_chunk_with_proof, _) = - ChunkWithProof::from_bytes(&bytes).expect("should deserialize correctly"); - - assert_eq!(chunk_with_proof, deserialized_chunk_with_proof); - - let chunk_with_incorrect_proof = chunk_with_proof.replace_first_proof(); - let bytes = chunk_with_incorrect_proof - .to_bytes() - .expect("should serialize correctly"); - - ChunkWithProof::from_bytes(&bytes).expect("should deserialize correctly"); - } - } - - #[test] - fn returns_error_on_incorrect_index() { - // This test needs specific data sizes, hence it doesn't use the proptest - - let chunk_with_proof = ChunkWithProof::new(&[], 0).expect("should create with empty data"); - assert!(chunk_with_proof.verify().is_ok()); - - let chunk_with_proof = - ChunkWithProof::new(&[], 1).expect_err("should error with empty data and index > 0"); - if let 
MerkleConstructionError::IndexOutOfBounds { count, index } = chunk_with_proof { - assert_eq!(count, 1); - assert_eq!(index, 1); - } else { - panic!("expected MerkleConstructionError::IndexOutOfBounds"); - } - - let data_larger_than_single_chunk = vec![0u8; ChunkWithProof::CHUNK_SIZE_BYTES * 10]; - ChunkWithProof::new(data_larger_than_single_chunk.as_slice(), 9).unwrap(); - - let chunk_with_proof = - ChunkWithProof::new(data_larger_than_single_chunk.as_slice(), 10).unwrap_err(); - if let MerkleConstructionError::IndexOutOfBounds { count, index } = chunk_with_proof { - assert_eq!(count, 10); - assert_eq!(index, 10); - } else { - panic!("expected MerkleConstructionError::IndexOutOfBounds"); - } - } - - #[test] - fn bytesrepr_serialization() { - let chunk_with_proof = random_chunk_with_proof(); - bytesrepr::test_serialization_roundtrip(&chunk_with_proof); - } - - #[test] - fn chunk_with_empty_data_contains_a_single_proof() { - let chunk_with_proof = ChunkWithProof::new(&[], 0).unwrap(); - assert_eq!(chunk_with_proof.proof.merkle_proof().len(), 1) - } -} diff --git a/casper_types_ver_2_0/src/digest/error.rs b/casper_types_ver_2_0/src/digest/error.rs deleted file mode 100644 index 539e7267..00000000 --- a/casper_types_ver_2_0/src/digest/error.rs +++ /dev/null @@ -1,233 +0,0 @@ -//! Errors in constructing and validating indexed Merkle proofs, chunks with indexed Merkle proofs. - -use alloc::string::String; -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -use super::{ChunkWithProof, Digest}; -use crate::bytesrepr; - -/// Possible hashing errors. -#[derive(Debug)] -#[non_exhaustive] -pub enum Error { - /// The digest length was an incorrect size. - IncorrectDigestLength(usize), - /// There was a decoding error. - Base16DecodeError(base16::DecodeError), -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::IncorrectDigestLength(length) => { - write!( - formatter, - "incorrect digest length {}, expected length {}.", - length, - Digest::LENGTH - ) - } - Error::Base16DecodeError(error) => { - write!(formatter, "base16 decode error: {}", error) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for Error { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - Error::IncorrectDigestLength(_) => None, - Error::Base16DecodeError(error) => Some(error), - } - } -} - -/// Error validating a Merkle proof of a chunk. -#[derive(Debug, PartialEq, Eq)] -#[non_exhaustive] -pub enum MerkleVerificationError { - /// Index out of bounds. - IndexOutOfBounds { - /// Count. - count: u64, - /// Index. - index: u64, - }, - - /// Unexpected proof length. - UnexpectedProofLength { - /// Count. - count: u64, - /// Index. - index: u64, - /// Expected proof length. - expected_proof_length: u8, - /// Actual proof length. 
- actual_proof_length: usize, - }, -} - -impl Display for MerkleVerificationError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - MerkleVerificationError::IndexOutOfBounds { count, index } => { - write!( - formatter, - "index out of bounds - count: {}, index: {}", - count, index - ) - } - MerkleVerificationError::UnexpectedProofLength { - count, - index, - expected_proof_length, - actual_proof_length, - } => { - write!( - formatter, - "unexpected proof length - count: {}, index: {}, expected length: {}, actual \ - length: {}", - count, index, expected_proof_length, actual_proof_length - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for MerkleVerificationError {} - -/// Error validating a chunk with proof. -#[derive(Debug)] -#[non_exhaustive] -pub enum ChunkWithProofVerificationError { - /// Indexed Merkle proof verification error. - MerkleVerificationError(MerkleVerificationError), - - /// Empty Merkle proof for trie with chunk. - ChunkWithProofHasEmptyMerkleProof { - /// Chunk with empty Merkle proof. - chunk_with_proof: ChunkWithProof, - }, - /// Unexpected Merkle root hash. - UnexpectedRootHash, - /// Bytesrepr error. - Bytesrepr(bytesrepr::Error), - - /// First digest in indexed Merkle proof did not match hash of chunk. - FirstDigestInMerkleProofDidNotMatchHashOfChunk { - /// First digest in indexed Merkle proof. - first_digest_in_indexed_merkle_proof: Digest, - /// Hash of chunk. - hash_of_chunk: Digest, - }, -} - -impl Display for ChunkWithProofVerificationError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - ChunkWithProofVerificationError::MerkleVerificationError(error) => { - write!(formatter, "{}", error) - } - ChunkWithProofVerificationError::ChunkWithProofHasEmptyMerkleProof { - chunk_with_proof, - } => { - write!( - formatter, - "chunk with proof has empty merkle proof: {:?}", - chunk_with_proof - ) - } - ChunkWithProofVerificationError::UnexpectedRootHash => { - write!(formatter, "merkle proof has an unexpected root hash") - } - ChunkWithProofVerificationError::Bytesrepr(error) => { - write!( - formatter, - "bytesrepr error computing chunkable hash: {}", - error - ) - } - ChunkWithProofVerificationError::FirstDigestInMerkleProofDidNotMatchHashOfChunk { - first_digest_in_indexed_merkle_proof, - hash_of_chunk, - } => { - write!( - formatter, - "first digest in merkle proof did not match hash of chunk - first digest: \ - {:?}, hash of chunk: {:?}", - first_digest_in_indexed_merkle_proof, hash_of_chunk - ) - } - } - } -} - -impl From for ChunkWithProofVerificationError { - fn from(error: MerkleVerificationError) -> Self { - ChunkWithProofVerificationError::MerkleVerificationError(error) - } -} - -#[cfg(feature = "std")] -impl StdError for ChunkWithProofVerificationError { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - ChunkWithProofVerificationError::MerkleVerificationError(error) => Some(error), - ChunkWithProofVerificationError::Bytesrepr(error) => Some(error), - ChunkWithProofVerificationError::ChunkWithProofHasEmptyMerkleProof { .. } - | ChunkWithProofVerificationError::UnexpectedRootHash - | ChunkWithProofVerificationError::FirstDigestInMerkleProofDidNotMatchHashOfChunk { - .. - } => None, - } - } -} - -/// Error during the construction of a Merkle proof. -#[derive(Debug, Eq, PartialEq, Clone)] -#[non_exhaustive] -pub enum MerkleConstructionError { - /// Chunk index was out of bounds. - IndexOutOfBounds { - /// Total chunks count. - count: u64, - /// Requested index. 
- index: u64, - }, - /// Too many Merkle tree leaves. - TooManyLeaves { - /// Total chunks count. - count: String, - }, -} - -impl Display for MerkleConstructionError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - MerkleConstructionError::IndexOutOfBounds { count, index } => { - write!( - formatter, - "could not construct merkle proof - index out of bounds - count: {}, index: {}", - count, index - ) - } - MerkleConstructionError::TooManyLeaves { count } => { - write!( - formatter, - "could not construct merkle proof - too many leaves - count: {}, max: {} \ - (u64::MAX)", - count, - u64::MAX - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for MerkleConstructionError {} diff --git a/casper_types_ver_2_0/src/digest/indexed_merkle_proof.rs b/casper_types_ver_2_0/src/digest/indexed_merkle_proof.rs deleted file mode 100644 index 7e8a7f7c..00000000 --- a/casper_types_ver_2_0/src/digest/indexed_merkle_proof.rs +++ /dev/null @@ -1,514 +0,0 @@ -//! Constructing and validating indexed Merkle proofs. -use alloc::{string::ToString, vec::Vec}; -use core::convert::TryInto; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use itertools::Itertools; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::{Digest, MerkleConstructionError, MerkleVerificationError}; -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// A Merkle proof of the given chunk. -#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct IndexedMerkleProof { - index: u64, - count: u64, - merkle_proof: Vec, - #[cfg_attr(any(feature = "once_cell", test), serde(skip))] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - root_hash: OnceCell, -} - -impl ToBytes for IndexedMerkleProof { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.index.to_bytes()?); - result.append(&mut self.count.to_bytes()?); - result.append(&mut self.merkle_proof.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.index.serialized_length() - + self.count.serialized_length() - + self.merkle_proof.serialized_length() - } -} - -impl FromBytes for IndexedMerkleProof { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (index, remainder) = FromBytes::from_bytes(bytes)?; - let (count, remainder) = FromBytes::from_bytes(remainder)?; - let (merkle_proof, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - IndexedMerkleProof { - index, - count, - merkle_proof, - #[cfg(any(feature = "once_cell", test))] - root_hash: OnceCell::new(), - }, - remainder, - )) - } -} - -impl IndexedMerkleProof { - /// Attempts to construct a new instance. 
- pub fn new(leaves: I, index: u64) -> Result - where - I: IntoIterator, - I::IntoIter: ExactSizeIterator, - { - use HashOrProof::{Hash as H, Proof as P}; - - enum HashOrProof { - Hash(Digest), - Proof(Vec), - } - - let leaves = leaves.into_iter(); - let count: u64 = - leaves - .len() - .try_into() - .map_err(|_| MerkleConstructionError::TooManyLeaves { - count: leaves.len().to_string(), - })?; - - let maybe_proof = leaves - .enumerate() - .map(|(i, hash)| { - if i as u64 == index { - P(vec![hash]) - } else { - H(hash) - } - }) - .tree_fold1(|x, y| match (x, y) { - (H(hash_x), H(hash_y)) => H(Digest::hash_pair(hash_x, hash_y)), - (H(hash), P(mut proof)) | (P(mut proof), H(hash)) => { - proof.push(hash); - P(proof) - } - (P(_), P(_)) => unreachable!(), - }); - - match maybe_proof { - None | Some(H(_)) => Err(MerkleConstructionError::IndexOutOfBounds { count, index }), - Some(P(merkle_proof)) => Ok(IndexedMerkleProof { - index, - count, - merkle_proof, - #[cfg(any(feature = "once_cell", test))] - root_hash: OnceCell::new(), - }), - } - } - - /// Returns the index. - pub fn index(&self) -> u64 { - self.index - } - - /// Returns the total count of chunks. - pub fn count(&self) -> u64 { - self.count - } - - /// Returns the root hash of this proof (i.e. the index hashed with the Merkle root hash). - /// - /// Note that with the `once_cell` feature enabled (generally done by enabling the `std` - /// feature), the root hash is memoized, and hence calling this method is cheap after the first - /// call. Without `once_cell` enabled, every call to this method calculates the root hash. - pub fn root_hash(&self) -> Digest { - #[cfg(any(feature = "once_cell", test))] - return *self.root_hash.get_or_init(|| self.compute_root_hash()); - - #[cfg(not(any(feature = "once_cell", test)))] - self.compute_root_hash() - } - - /// Returns the full collection of hash digests of the proof. - pub fn merkle_proof(&self) -> &[Digest] { - &self.merkle_proof - } - - /// Attempts to verify self. - pub fn verify(&self) -> Result<(), MerkleVerificationError> { - if self.index >= self.count { - return Err(MerkleVerificationError::IndexOutOfBounds { - count: self.count, - index: self.index, - }); - } - let expected_proof_length = self.compute_expected_proof_length(); - if self.merkle_proof.len() != expected_proof_length as usize { - return Err(MerkleVerificationError::UnexpectedProofLength { - count: self.count, - index: self.index, - expected_proof_length, - actual_proof_length: self.merkle_proof.len(), - }); - } - Ok(()) - } - - fn compute_root_hash(&self) -> Digest { - let IndexedMerkleProof { - count, - merkle_proof, - .. - } = self; - - let mut hashes = merkle_proof.iter(); - let raw_root = if let Some(leaf_hash) = hashes.next().cloned() { - // Compute whether to hash left or right for the elements of the Merkle proof. - // This gives a path to the value with the specified index. - // We represent this path as a sequence of 64 bits. 1 here means "hash right". - let mut path: u64 = 0; - let mut n = self.count; - let mut i = self.index; - while n > 1 { - path <<= 1; - let pivot = 1u64 << (63 - (n - 1).leading_zeros()); - if i < pivot { - n = pivot; - } else { - path |= 1; - n -= pivot; - i -= pivot; - } - } - - // Compute the raw Merkle root by hashing the proof from leaf hash up. 
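// A minimal illustrative sketch (not from the deleted sources) of the
// index-to-path walk described in the comment above: starting from `count`
// leaves, each step either descends into the largest power-of-two left subtree
// (`pivot`) or sets a "hash right" bit and continues in the remainder. The
// number of steps, plus one for the leaf hash itself, is the expected proof
// length computed by `compute_expected_proof_length`.
fn path_and_proof_len(mut index: u64, count: u64) -> (u64, u8) {
    let mut path: u64 = 0;
    let mut len: u8 = if count == 0 { 0 } else { 1 };
    let mut n = count;
    while n > 1 {
        path <<= 1;
        let pivot = 1u64 << (63 - (n - 1).leading_zeros());
        if index < pivot {
            n = pivot;
        } else {
            path |= 1;
            n -= pivot;
            index -= pivot;
        }
        len += 1;
    }
    (path, len)
}

fn main() {
    // Five leaves: leaf 4 sits alone to the right of a 4-leaf subtree, so its
    // proof holds just its own hash plus that subtree's root.
    assert_eq!(path_and_proof_len(4, 5), (1, 2));
    // Leaf 0 of five descends left three times inside the 4-leaf subtree.
    assert_eq!(path_and_proof_len(0, 5), (0, 4));
}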
- hashes.fold(leaf_hash, |acc, hash| { - let digest = if (path & 1) == 1 { - Digest::hash_pair(hash, acc) - } else { - Digest::hash_pair(acc, hash) - }; - path >>= 1; - digest - }) - } else { - Digest::SENTINEL_MERKLE_TREE - }; - - // The Merkle root is the hash of the count with the raw root. - Digest::hash_merkle_root(*count, raw_root) - } - - // Proof lengths are never bigger than 65 is because we are using 64 bit counts - fn compute_expected_proof_length(&self) -> u8 { - if self.count == 0 { - return 0; - } - let mut l = 1; - let mut n = self.count; - let mut i = self.index; - while n > 1 { - let pivot = 1u64 << (63 - (n - 1).leading_zeros()); - if i < pivot { - n = pivot; - } else { - n -= pivot; - i -= pivot; - } - l += 1; - } - l - } - - #[cfg(test)] - pub fn inject_merkle_proof(&mut self, merkle_proof: Vec) { - self.merkle_proof = merkle_proof; - } -} - -#[cfg(test)] -mod tests { - use once_cell::sync::OnceCell; - use proptest::prelude::{prop_assert, prop_assert_eq}; - use proptest_attr_macro::proptest; - use rand::{distributions::Standard, Rng}; - - use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, IndexedMerkleProof, MerkleVerificationError, - }; - - fn random_indexed_merkle_proof() -> IndexedMerkleProof { - let mut rng = rand::thread_rng(); - let leaf_count: u64 = rng.gen_range(1..100); - let index = rng.gen_range(0..leaf_count); - let leaves: Vec = (0..leaf_count) - .map(|i| Digest::hash(i.to_le_bytes())) - .collect(); - IndexedMerkleProof::new(leaves.iter().cloned(), index) - .expect("should create indexed Merkle proof") - } - - #[test] - fn test_merkle_proofs() { - let mut rng = rand::thread_rng(); - for _ in 0..20 { - let leaf_count: u64 = rng.gen_range(1..100); - let index = rng.gen_range(0..leaf_count); - let leaves: Vec = (0..leaf_count) - .map(|i| Digest::hash(i.to_le_bytes())) - .collect(); - let root = Digest::hash_merkle_tree(leaves.clone()); - let indexed_merkle_proof = IndexedMerkleProof::new(leaves.clone(), index).unwrap(); - assert_eq!( - indexed_merkle_proof.compute_expected_proof_length(), - indexed_merkle_proof.merkle_proof().len() as u8 - ); - assert_eq!(indexed_merkle_proof.verify(), Ok(())); - assert_eq!(leaf_count, indexed_merkle_proof.count); - assert_eq!(leaves[index as usize], indexed_merkle_proof.merkle_proof[0]); - assert_eq!(root, indexed_merkle_proof.root_hash()); - } - } - - #[test] - fn out_of_bounds_index() { - let out_of_bounds_indexed_merkle_proof = IndexedMerkleProof { - index: 23, - count: 4, - merkle_proof: vec![Digest([0u8; 32]); 3], - root_hash: OnceCell::new(), - }; - assert_eq!( - out_of_bounds_indexed_merkle_proof.verify(), - Err(MerkleVerificationError::IndexOutOfBounds { - count: 4, - index: 23 - }) - ) - } - - #[test] - fn unexpected_proof_length() { - let out_of_bounds_indexed_merkle_proof = IndexedMerkleProof { - index: 1235, - count: 5647, - merkle_proof: vec![Digest([0u8; 32]); 13], - root_hash: OnceCell::new(), - }; - assert_eq!( - out_of_bounds_indexed_merkle_proof.verify(), - Err(MerkleVerificationError::UnexpectedProofLength { - count: 5647, - index: 1235, - expected_proof_length: 14, - actual_proof_length: 13 - }) - ) - } - - #[test] - fn empty_unexpected_proof_length() { - let out_of_bounds_indexed_merkle_proof = IndexedMerkleProof { - index: 0, - count: 0, - merkle_proof: vec![Digest([0u8; 32]); 3], - root_hash: OnceCell::new(), - }; - assert_eq!( - out_of_bounds_indexed_merkle_proof.verify(), - Err(MerkleVerificationError::IndexOutOfBounds { count: 0, index: 0 }) - ) - } - - #[test] - fn 
empty_out_of_bounds_index() { - let out_of_bounds_indexed_merkle_proof = IndexedMerkleProof { - index: 23, - count: 0, - merkle_proof: vec![], - root_hash: OnceCell::new(), - }; - assert_eq!( - out_of_bounds_indexed_merkle_proof.verify(), - Err(MerkleVerificationError::IndexOutOfBounds { - count: 0, - index: 23 - }) - ) - } - - #[test] - fn deep_proof_doesnt_kill_stack() { - const PROOF_LENGTH: usize = 63; - let indexed_merkle_proof = IndexedMerkleProof { - index: 42, - count: 1 << (PROOF_LENGTH - 1), - merkle_proof: vec![Digest([0u8; Digest::LENGTH]); PROOF_LENGTH], - root_hash: OnceCell::new(), - }; - let _hash = indexed_merkle_proof.root_hash(); - } - - #[test] - fn empty_proof() { - let empty_merkle_root = Digest::hash_merkle_tree(vec![]); - assert_eq!(empty_merkle_root, Digest::SENTINEL_MERKLE_TREE); - let indexed_merkle_proof = IndexedMerkleProof { - index: 0, - count: 0, - merkle_proof: vec![], - root_hash: OnceCell::new(), - }; - assert!(indexed_merkle_proof.verify().is_err()); - } - - #[proptest] - fn expected_proof_length_le_65(index: u64, count: u64) { - let indexed_merkle_proof = IndexedMerkleProof { - index, - count, - merkle_proof: vec![], - root_hash: OnceCell::new(), - }; - prop_assert!(indexed_merkle_proof.compute_expected_proof_length() <= 65); - } - - fn reference_root_from_proof(index: u64, count: u64, proof: &[Digest]) -> Digest { - fn compute_raw_root_from_proof(index: u64, leaf_count: u64, proof: &[Digest]) -> Digest { - if leaf_count == 0 { - return Digest::SENTINEL_MERKLE_TREE; - } - if leaf_count == 1 { - return proof[0]; - } - let half = 1u64 << (63 - (leaf_count - 1).leading_zeros()); - let last = proof.len() - 1; - if index < half { - let left = compute_raw_root_from_proof(index, half, &proof[..last]); - Digest::hash_pair(left, proof[last]) - } else { - let right = - compute_raw_root_from_proof(index - half, leaf_count - half, &proof[..last]); - Digest::hash_pair(proof[last], right) - } - } - - let raw_root = compute_raw_root_from_proof(index, count, proof); - Digest::hash_merkle_root(count, raw_root) - } - - /// Construct an `IndexedMerkleProof` with a proof of zero digests. 
- fn test_indexed_merkle_proof(index: u64, count: u64) -> IndexedMerkleProof { - let mut indexed_merkle_proof = IndexedMerkleProof { - index, - count, - merkle_proof: vec![], - root_hash: OnceCell::new(), - }; - let expected_proof_length = indexed_merkle_proof.compute_expected_proof_length(); - indexed_merkle_proof.merkle_proof = rand::thread_rng() - .sample_iter(Standard) - .take(expected_proof_length as usize) - .collect(); - indexed_merkle_proof - } - - #[proptest] - fn root_from_proof_agrees_with_recursion(index: u64, count: u64) { - let indexed_merkle_proof = test_indexed_merkle_proof(index, count); - prop_assert_eq!( - indexed_merkle_proof.root_hash(), - reference_root_from_proof( - indexed_merkle_proof.index, - indexed_merkle_proof.count, - indexed_merkle_proof.merkle_proof(), - ), - "Result did not agree with reference implementation.", - ); - } - - #[test] - fn root_from_proof_agrees_with_recursion_2147483648_4294967297() { - let indexed_merkle_proof = test_indexed_merkle_proof(2147483648, 4294967297); - assert_eq!( - indexed_merkle_proof.root_hash(), - reference_root_from_proof( - indexed_merkle_proof.index, - indexed_merkle_proof.count, - indexed_merkle_proof.merkle_proof(), - ), - "Result did not agree with reference implementation.", - ); - } - - #[test] - fn serde_deserialization_of_malformed_proof_should_work() { - let indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - - let json = serde_json::to_string(&indexed_merkle_proof).unwrap(); - assert_eq!( - indexed_merkle_proof, - serde_json::from_str::(&json) - .expect("should deserialize correctly") - ); - - // Check that proof with index greater than count deserializes correctly - let mut indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - indexed_merkle_proof.index += 1; - let json = serde_json::to_string(&indexed_merkle_proof).unwrap(); - serde_json::from_str::(&json).expect("should deserialize correctly"); - - // Check that proof with incorrect length deserializes correctly - let mut indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - indexed_merkle_proof.merkle_proof.push(Digest::hash("XXX")); - let json = serde_json::to_string(&indexed_merkle_proof).unwrap(); - serde_json::from_str::(&json).expect("should deserialize correctly"); - } - - #[test] - fn bytesrepr_deserialization_of_malformed_proof_should_work() { - let indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - - let bytes = indexed_merkle_proof - .to_bytes() - .expect("should serialize correctly"); - IndexedMerkleProof::from_bytes(&bytes).expect("should deserialize correctly"); - - // Check that proof with index greater than count deserializes correctly - let mut indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - indexed_merkle_proof.index += 1; - let bytes = indexed_merkle_proof - .to_bytes() - .expect("should serialize correctly"); - IndexedMerkleProof::from_bytes(&bytes).expect("should deserialize correctly"); - - // Check that proof with incorrect length deserializes correctly - let mut indexed_merkle_proof = test_indexed_merkle_proof(10, 10); - indexed_merkle_proof.merkle_proof.push(Digest::hash("XXX")); - let bytes = indexed_merkle_proof - .to_bytes() - .expect("should serialize correctly"); - IndexedMerkleProof::from_bytes(&bytes).expect("should deserialize correctly"); - } - - #[test] - fn bytesrepr_serialization() { - let indexed_merkle_proof = random_indexed_merkle_proof(); - bytesrepr::test_serialization_roundtrip(&indexed_merkle_proof); - } -} diff --git a/casper_types_ver_2_0/src/display_iter.rs 
b/casper_types_ver_2_0/src/display_iter.rs deleted file mode 100644 index 00b23e84..00000000 --- a/casper_types_ver_2_0/src/display_iter.rs +++ /dev/null @@ -1,40 +0,0 @@ -use core::{ - cell::RefCell, - fmt::{self, Display, Formatter}, -}; - -/// A helper to allow `Display` printing the items of an iterator with a comma and space between -/// each. -#[derive(Debug)] -pub struct DisplayIter(RefCell>); - -impl DisplayIter { - /// Returns a new `DisplayIter`. - pub fn new(item: T) -> Self { - DisplayIter(RefCell::new(Some(item))) - } -} - -impl Display for DisplayIter -where - I: IntoIterator, - T: Display, -{ - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if let Some(src) = self.0.borrow_mut().take() { - let mut first = true; - for item in src.into_iter().take(f.width().unwrap_or(usize::MAX)) { - if first { - first = false; - write!(f, "{}", item)?; - } else { - write!(f, ", {}", item)?; - } - } - - Ok(()) - } else { - write!(f, "DisplayIter:GONE") - } - } -} diff --git a/casper_types_ver_2_0/src/era_id.rs b/casper_types_ver_2_0/src/era_id.rs deleted file mode 100644 index 5179d59e..00000000 --- a/casper_types_ver_2_0/src/era_id.rs +++ /dev/null @@ -1,254 +0,0 @@ -use alloc::vec::Vec; -use core::{ - fmt::{self, Debug, Display, Formatter}, - num::ParseIntError, - ops::{Add, AddAssign, Sub}, - str::FromStr, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, -}; - -/// Era ID newtype. -#[derive( - Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "testing", derive(proptest_derive::Arbitrary))] -#[serde(deny_unknown_fields)] -pub struct EraId(u64); - -impl EraId { - /// Maximum possible value an [`EraId`] can hold. - pub const MAX: EraId = EraId(u64::max_value()); - - /// Creates new [`EraId`] instance. - pub const fn new(value: u64) -> EraId { - EraId(value) - } - - /// Returns an iterator over era IDs of `num_eras` future eras starting from current. - pub fn iter(&self, num_eras: u64) -> impl Iterator { - let current_era_id = self.0; - (current_era_id..current_era_id + num_eras).map(EraId) - } - - /// Returns an iterator over era IDs of `num_eras` future eras starting from current, plus the - /// provided one. - pub fn iter_inclusive(&self, num_eras: u64) -> impl Iterator { - let current_era_id = self.0; - (current_era_id..=current_era_id + num_eras).map(EraId) - } - - /// Increments the era. - /// - /// For `u64::MAX`, this returns `u64::MAX` again: We want to make sure this doesn't panic, and - /// that era number will never be reached in practice. - pub fn increment(&mut self) { - self.0 = self.0.saturating_add(1); - } - - /// Returns a successor to current era. - /// - /// For `u64::MAX`, this returns `u64::MAX` again: We want to make sure this doesn't panic, and - /// that era number will never be reached in practice. - #[must_use] - pub fn successor(self) -> EraId { - EraId::from(self.0.saturating_add(1)) - } - - /// Returns the predecessor to current era, or `None` if genesis. 
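The increment and successor helpers above are documented to saturate at u64::MAX precisely so they can never panic. A small sketch of that behaviour, using a hypothetical MiniEraId stand-in rather than the real newtype:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct MiniEraId(u64);

    impl MiniEraId {
        const MAX: MiniEraId = MiniEraId(u64::MAX);

        /// Saturating successor: clamps at the maximum instead of overflowing.
        fn successor(self) -> MiniEraId {
            MiniEraId(self.0.saturating_add(1))
        }

        /// Checked addition: reports overflow to the caller as `None`.
        fn checked_add(self, rhs: u64) -> Option<MiniEraId> {
            self.0.checked_add(rhs).map(MiniEraId)
        }
    }

    fn main() {
        let last = MiniEraId::MAX;
        assert_eq!(last.successor(), MiniEraId::MAX);
        assert_eq!(last.checked_add(1), None);
        println!("saturating successor of MAX is still MAX");
    }

The plain Add impl further down takes the opposite stance: it is allowed to overflow and leaves the proof obligation with the caller, hence its clippy::arithmetic_side_effects allowance.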
- #[must_use] - pub fn predecessor(self) -> Option { - self.0.checked_sub(1).map(EraId) - } - - /// Returns the current era plus `x`, or `None` if that would overflow - pub fn checked_add(&self, x: u64) -> Option { - self.0.checked_add(x).map(EraId) - } - - /// Returns the current era minus `x`, or `None` if that would be less than `0`. - pub fn checked_sub(&self, x: u64) -> Option { - self.0.checked_sub(x).map(EraId) - } - - /// Returns the current era minus `x`, or `0` if that would be less than `0`. - #[must_use] - pub fn saturating_sub(&self, x: u64) -> EraId { - EraId::from(self.0.saturating_sub(x)) - } - - /// Returns the current era plus `x`, or [`EraId::MAX`] if overflow would occur. - #[must_use] - pub fn saturating_add(self, rhs: u64) -> EraId { - EraId(self.0.saturating_add(rhs)) - } - - /// Returns the current era times `x`, or [`EraId::MAX`] if overflow would occur. - #[must_use] - pub fn saturating_mul(&self, x: u64) -> EraId { - EraId::from(self.0.saturating_mul(x)) - } - - /// Returns whether this is era 0. - pub fn is_genesis(&self) -> bool { - self.0 == 0 - } - - /// Returns little endian bytes. - pub fn to_le_bytes(self) -> [u8; 8] { - self.0.to_le_bytes() - } - - /// Returns a raw value held by this [`EraId`] instance. - /// - /// You should prefer [`From`] trait implementations over this method where possible. - pub fn value(self) -> u64 { - self.0 - } - - /// Returns a random `EraId`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - EraId(rng.gen_range(0..1_000_000)) - } -} - -impl FromStr for EraId { - type Err = ParseIntError; - - fn from_str(s: &str) -> Result { - u64::from_str(s).map(EraId) - } -} - -impl Add for EraId { - type Output = EraId; - - #[allow(clippy::arithmetic_side_effects)] // The caller must make sure this doesn't overflow. - fn add(self, x: u64) -> EraId { - EraId::from(self.0 + x) - } -} - -impl AddAssign for EraId { - fn add_assign(&mut self, x: u64) { - self.0 += x; - } -} - -impl Sub for EraId { - type Output = EraId; - - #[allow(clippy::arithmetic_side_effects)] // The caller must make sure this doesn't overflow. 
- fn sub(self, x: u64) -> EraId { - EraId::from(self.0 - x) - } -} - -impl Display for EraId { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "era {}", self.0) - } -} - -impl From for u64 { - fn from(era_id: EraId) -> Self { - era_id.value() - } -} - -impl From for EraId { - fn from(era_id: u64) -> Self { - EraId(era_id) - } -} - -impl ToBytes for EraId { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for EraId { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (id_value, remainder) = u64::from_bytes(bytes)?; - let era_id = EraId::from(id_value); - Ok((era_id, remainder)) - } -} - -impl CLTyped for EraId { - fn cl_type() -> CLType { - CLType::U64 - } -} - -#[cfg(test)] -mod tests { - use proptest::prelude::*; - - use super::*; - use crate::gens::era_id_arb; - - #[test] - fn should_calculate_correct_inclusive_future_eras() { - let auction_delay = 3; - - let current_era = EraId::from(42); - - let window: Vec = current_era.iter_inclusive(auction_delay).collect(); - assert_eq!(window.len(), auction_delay as usize + 1); - assert_eq!(window.first(), Some(¤t_era)); - assert_eq!( - window.iter().next_back(), - Some(&(current_era + auction_delay)) - ); - } - - #[test] - fn should_have_valid_genesis_era_id() { - let expected_initial_era_id = EraId::from(0); - assert!(expected_initial_era_id.is_genesis()); - assert!(!expected_initial_era_id.successor().is_genesis()) - } - - #[test] - fn should_increment_era_id() { - let mut era = EraId::from(0); - assert!(era.is_genesis()); - era.increment(); - assert_eq!(era.value(), 1, "should have incremented to 1"); - } - - proptest! { - #[test] - fn bytesrepr_roundtrip(era_id in era_id_arb()) { - bytesrepr::test_serialization_roundtrip(&era_id); - } - } -} diff --git a/casper_types_ver_2_0/src/execution.rs b/casper_types_ver_2_0/src/execution.rs deleted file mode 100644 index 887966df..00000000 --- a/casper_types_ver_2_0/src/execution.rs +++ /dev/null @@ -1,17 +0,0 @@ -//! Types related to execution of deploys. - -mod effects; -mod execution_result; -pub mod execution_result_v1; -mod execution_result_v2; -mod transform; -mod transform_error; -mod transform_kind; - -pub use effects::Effects; -pub use execution_result::ExecutionResult; -pub use execution_result_v1::ExecutionResultV1; -pub use execution_result_v2::ExecutionResultV2; -pub use transform::Transform; -pub use transform_error::TransformError; -pub use transform_kind::{TransformInstruction, TransformKind}; diff --git a/casper_types_ver_2_0/src/execution/effects.rs b/casper_types_ver_2_0/src/execution/effects.rs deleted file mode 100644 index e1031196..00000000 --- a/casper_types_ver_2_0/src/execution/effects.rs +++ /dev/null @@ -1,105 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::Transform; -#[cfg(any(feature = "testing", test))] -use super::TransformKind; -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// A log of all transforms produced during execution. 
-#[derive(Debug, Clone, Eq, Default, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Effects(Vec); - -impl Effects { - /// Constructs a new, empty `Effects`. - pub const fn new() -> Self { - Effects(vec![]) - } - - /// Returns a reference to the transforms. - pub fn transforms(&self) -> &[Transform] { - &self.0 - } - - /// Appends a transform. - pub fn push(&mut self, transform: Transform) { - self.0.push(transform) - } - - /// Moves all elements from `other` into `self`. - pub fn append(&mut self, mut other: Self) { - self.0.append(&mut other.0); - } - - /// Returns `true` if there are no transforms recorded. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Returns the number of transforms recorded. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Consumes `self`, returning the wrapped vec. - pub fn value(self) -> Vec { - self.0 - } - - /// Returns a random `Effects`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut R) -> Self { - let mut effects = Effects::new(); - let transform_count = rng.gen_range(0..6); - for _ in 0..transform_count { - effects.push(Transform::new(rng.gen(), TransformKind::random(rng))); - } - effects - } -} - -impl ToBytes for Effects { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Effects { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (transforms, remainder) = Vec::::from_bytes(bytes)?; - Ok((Effects(transforms), remainder)) - } -} - -#[cfg(test)] -mod tests { - use crate::testing::TestRng; - - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let effects = Effects::random(rng); - bytesrepr::test_serialization_roundtrip(&effects); - } -} diff --git a/casper_types_ver_2_0/src/execution/execution_result.rs b/casper_types_ver_2_0/src/execution/execution_result.rs deleted file mode 100644 index c24dfb1d..00000000 --- a/casper_types_ver_2_0/src/execution/execution_result.rs +++ /dev/null @@ -1,148 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::distributions::Distribution; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::{ExecutionResultV1, ExecutionResultV2}; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const V1_TAG: u8 = 0; -const V2_TAG: u8 = 1; - -/// The versioned result of executing a single deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum ExecutionResult { - /// Version 1 of execution result type. - #[serde(rename = "Version1")] - V1(ExecutionResultV1), - /// Version 2 of execution result type. - #[serde(rename = "Version2")] - V2(ExecutionResultV2), -} - -impl ExecutionResult { - /// Returns a random ExecutionResult. 
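The ToBytes/FromBytes impls for ExecutionResult below write one leading tag byte (V1_TAG / V2_TAG) followed by the variant's own encoding, and reject any unknown tag as a formatting error; the same pattern recurs for the other enums in this diff. A self-contained sketch of that layout; Versioned, its fields, and the helper functions are illustrative stand-ins, not the casper-types bytesrepr API:

    #[derive(Debug, PartialEq)]
    enum Versioned {
        V1(u32),
        V2(String),
    }

    const V1_TAG: u8 = 0;
    const V2_TAG: u8 = 1;

    fn to_bytes(value: &Versioned) -> Vec<u8> {
        match value {
            Versioned::V1(n) => {
                let mut out = vec![V1_TAG];
                out.extend_from_slice(&n.to_le_bytes());
                out
            }
            Versioned::V2(s) => {
                let mut out = vec![V2_TAG];
                out.extend_from_slice(&(s.len() as u32).to_le_bytes());
                out.extend_from_slice(s.as_bytes());
                out
            }
        }
    }

    fn from_bytes(bytes: &[u8]) -> Option<(Versioned, &[u8])> {
        let (&tag, rest) = bytes.split_first()?;
        match tag {
            V1_TAG => {
                if rest.len() < 4 {
                    return None;
                }
                let (n, rest) = rest.split_at(4);
                Some((Versioned::V1(u32::from_le_bytes(n.try_into().ok()?)), rest))
            }
            V2_TAG => {
                if rest.len() < 4 {
                    return None;
                }
                let (len_bytes, rest) = rest.split_at(4);
                let len = u32::from_le_bytes(len_bytes.try_into().ok()?) as usize;
                if rest.len() < len {
                    return None;
                }
                let (s, rest) = rest.split_at(len);
                Some((Versioned::V2(String::from_utf8(s.to_vec()).ok()?), rest))
            }
            // Unknown tag: the real impls map this to bytesrepr::Error::Formatting.
            _ => None,
        }
    }

    fn main() {
        let original = Versioned::V2("hello".to_string());
        let bytes = to_bytes(&original);
        let (decoded, remainder) = from_bytes(&bytes).unwrap();
        assert_eq!(decoded, original);
        assert!(remainder.is_empty());
    }

Returning the unconsumed remainder from from_bytes, as the real FromBytes trait does, is what lets callers decode these values when they are embedded inside larger structures.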
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen_bool(0.5) { - Self::V1(rand::distributions::Standard.sample(rng)) - } else { - Self::V2(ExecutionResultV2::random(rng)) - } - } -} - -impl From for ExecutionResult { - fn from(value: ExecutionResultV1) -> Self { - ExecutionResult::V1(value) - } -} - -impl From for ExecutionResult { - fn from(value: ExecutionResultV2) -> Self { - ExecutionResult::V2(value) - } -} - -impl ToBytes for ExecutionResult { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ExecutionResult::V1(result) => { - V1_TAG.write_bytes(writer)?; - result.write_bytes(writer) - } - ExecutionResult::V2(result) => { - V2_TAG.write_bytes(writer)?; - result.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - ExecutionResult::V1(result) => result.serialized_length(), - ExecutionResult::V2(result) => result.serialized_length(), - } - } -} - -impl FromBytes for ExecutionResult { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - V1_TAG => { - let (result, remainder) = ExecutionResultV1::from_bytes(remainder)?; - Ok((ExecutionResult::V1(result), remainder)) - } - V2_TAG => { - let (result, remainder) = ExecutionResultV2::from_bytes(remainder)?; - Ok((ExecutionResult::V2(result), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use rand::Rng; - - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let execution_result = ExecutionResult::V1(rng.gen()); - bytesrepr::test_serialization_roundtrip(&execution_result); - let execution_result = ExecutionResult::from(ExecutionResultV2::random(rng)); - bytesrepr::test_serialization_roundtrip(&execution_result); - } - - #[test] - fn bincode_roundtrip() { - let rng = &mut TestRng::new(); - let execution_result = ExecutionResult::V1(rng.gen()); - let serialized = bincode::serialize(&execution_result).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(execution_result, deserialized); - - let execution_result = ExecutionResult::from(ExecutionResultV2::random(rng)); - let serialized = bincode::serialize(&execution_result).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(execution_result, deserialized); - } - - #[test] - fn json_roundtrip() { - let rng = &mut TestRng::new(); - let execution_result = ExecutionResult::V1(rng.gen()); - let serialized = serde_json::to_string(&execution_result).unwrap(); - let deserialized = serde_json::from_str(&serialized).unwrap(); - assert_eq!(execution_result, deserialized); - - let execution_result = ExecutionResult::from(ExecutionResultV2::random(rng)); - let serialized = serde_json::to_string(&execution_result).unwrap(); - let deserialized = serde_json::from_str(&serialized).unwrap(); - assert_eq!(execution_result, deserialized); - } -} diff --git a/casper_types_ver_2_0/src/execution/execution_result_v1.rs b/casper_types_ver_2_0/src/execution/execution_result_v1.rs deleted file mode 100644 index bf8f908a..00000000 --- a/casper_types_ver_2_0/src/execution/execution_result_v1.rs +++ /dev/null @@ -1,794 +0,0 @@ -//! 
Types for reporting results of execution pre `casper-node` v2.0.0. - -use core::convert::TryFrom; - -use alloc::{boxed::Box, string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::{FromPrimitive, ToPrimitive}; -use num_derive::{FromPrimitive, ToPrimitive}; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - seq::SliceRandom, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - system::auction::{Bid, BidKind, EraInfo, UnbondingPurse, WithdrawPurse}, - CLValue, DeployInfo, Key, Transfer, TransferAddr, U128, U256, U512, -}; - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum ExecutionResultTag { - Failure = 0, - Success = 1, -} - -impl TryFrom for ExecutionResultTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum OpTag { - Read = 0, - Write = 1, - Add = 2, - NoOp = 3, - Prune = 4, -} - -impl TryFrom for OpTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -#[derive(FromPrimitive, ToPrimitive, Debug)] -#[repr(u8)] -enum TransformTag { - Identity = 0, - WriteCLValue = 1, - WriteAccount = 2, - WriteByteCode = 3, - WriteContract = 4, - WritePackage = 5, - WriteDeployInfo = 6, - WriteTransfer = 7, - WriteEraInfo = 8, - WriteBid = 9, - WriteWithdraw = 10, - AddInt32 = 11, - AddUInt64 = 12, - AddUInt128 = 13, - AddUInt256 = 14, - AddUInt512 = 15, - AddKeys = 16, - Failure = 17, - WriteUnbonding = 18, - WriteAddressableEntity = 19, - Prune = 20, - WriteBidKind = 21, -} - -impl TryFrom for TransformTag { - type Error = bytesrepr::Error; - - fn try_from(value: u8) -> Result { - FromPrimitive::from_u8(value).ok_or(bytesrepr::Error::Formatting) - } -} - -/// The result of executing a single deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum ExecutionResultV1 { - /// The result of a failed execution. - Failure { - /// The effect of executing the deploy. - effect: ExecutionEffect, - /// A record of Transfers performed while executing the deploy. - transfers: Vec, - /// The cost of executing the deploy. - cost: U512, - /// The error message associated with executing the deploy. - error_message: String, - }, - /// The result of a successful execution. - Success { - /// The effect of executing the deploy. - effect: ExecutionEffect, - /// A record of Transfers performed while executing the deploy. - transfers: Vec, - /// The cost of executing the deploy. 
- cost: U512, - }, -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ExecutionResultV1 { - let op_count = rng.gen_range(0..6); - let mut operations = Vec::new(); - for _ in 0..op_count { - let op = [OpKind::Read, OpKind::Add, OpKind::NoOp, OpKind::Write] - .choose(rng) - .unwrap(); - operations.push(Operation { - key: rng.gen::().to_string(), - kind: *op, - }); - } - - let transform_count = rng.gen_range(0..6); - let mut transforms = Vec::new(); - for _ in 0..transform_count { - transforms.push(TransformEntry { - key: rng.gen::().to_string(), - transform: rng.gen(), - }); - } - - let execution_effect = ExecutionEffect { - operations, - transforms, - }; - - let transfer_count = rng.gen_range(0..6); - let mut transfers = Vec::new(); - for _ in 0..transfer_count { - transfers.push(TransferAddr::new(rng.gen())) - } - - if rng.gen() { - ExecutionResultV1::Failure { - effect: execution_effect, - transfers, - cost: rng.gen::().into(), - error_message: format!("Error message {}", rng.gen::()), - } - } else { - ExecutionResultV1::Success { - effect: execution_effect, - transfers, - cost: rng.gen::().into(), - } - } - } -} - -impl ToBytes for ExecutionResultV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ExecutionResultV1::Failure { - effect, - transfers, - cost, - error_message, - } => { - (ExecutionResultTag::Failure as u8).write_bytes(writer)?; - effect.write_bytes(writer)?; - transfers.write_bytes(writer)?; - cost.write_bytes(writer)?; - error_message.write_bytes(writer) - } - ExecutionResultV1::Success { - effect, - transfers, - cost, - } => { - (ExecutionResultTag::Success as u8).write_bytes(writer)?; - effect.write_bytes(writer)?; - transfers.write_bytes(writer)?; - cost.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - ExecutionResultV1::Failure { - effect, - transfers, - cost, - error_message, - } => { - effect.serialized_length() - + transfers.serialized_length() - + cost.serialized_length() - + error_message.serialized_length() - } - ExecutionResultV1::Success { - effect, - transfers, - cost, - } => { - effect.serialized_length() - + transfers.serialized_length() - + cost.serialized_length() - } - } - } -} - -impl FromBytes for ExecutionResultV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match TryFrom::try_from(tag)? 
{ - ExecutionResultTag::Failure => { - let (effect, remainder) = ExecutionEffect::from_bytes(remainder)?; - let (transfers, remainder) = Vec::::from_bytes(remainder)?; - let (cost, remainder) = U512::from_bytes(remainder)?; - let (error_message, remainder) = String::from_bytes(remainder)?; - let execution_result = ExecutionResultV1::Failure { - effect, - transfers, - cost, - error_message, - }; - Ok((execution_result, remainder)) - } - ExecutionResultTag::Success => { - let (execution_effect, remainder) = ExecutionEffect::from_bytes(remainder)?; - let (transfers, remainder) = Vec::::from_bytes(remainder)?; - let (cost, remainder) = U512::from_bytes(remainder)?; - let execution_result = ExecutionResultV1::Success { - effect: execution_effect, - transfers, - cost, - }; - Ok((execution_result, remainder)) - } - } - } -} - -/// The sequence of execution transforms from a single deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Default, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct ExecutionEffect { - /// The resulting operations. - pub operations: Vec, - /// The sequence of execution transforms. - pub transforms: Vec, -} - -impl ToBytes for ExecutionEffect { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.operations.write_bytes(writer)?; - self.transforms.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.operations.serialized_length() + self.transforms.serialized_length() - } -} - -impl FromBytes for ExecutionEffect { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (operations, remainder) = Vec::::from_bytes(bytes)?; - let (transforms, remainder) = Vec::::from_bytes(remainder)?; - let json_effects = ExecutionEffect { - operations, - transforms, - }; - Ok((json_effects, remainder)) - } -} - -/// An operation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Operation { - /// The formatted string of the `Key`. - pub key: String, - /// The type of operation. - pub kind: OpKind, -} - -impl ToBytes for Operation { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.key.write_bytes(writer)?; - self.kind.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.key.serialized_length() + self.kind.serialized_length() - } -} - -impl FromBytes for Operation { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (key, remainder) = String::from_bytes(bytes)?; - let (kind, remainder) = OpKind::from_bytes(remainder)?; - let operation = Operation { key, kind }; - Ok((operation, remainder)) - } -} - -/// The type of operation performed while executing a deploy. 
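Every ToBytes impl in this hunk pairs write_bytes with a serialized_length that must equal the number of bytes actually written; allocate_buffer relies on it to size the output, and the bytesrepr round-trip tests would catch a mismatch. A small sketch of that invariant, with a hypothetical Op type standing in for Operation:

    struct Op {
        key: String,
        kind: u8,
    }

    impl Op {
        fn to_bytes(&self) -> Vec<u8> {
            let mut out = Vec::with_capacity(self.serialized_length());
            // The key string is written as a u32 length prefix followed by its bytes.
            out.extend_from_slice(&(self.key.len() as u32).to_le_bytes());
            out.extend_from_slice(self.key.as_bytes());
            out.push(self.kind); // one tag byte for the operation kind
            out
        }

        /// Must stay in lock-step with `to_bytes`.
        fn serialized_length(&self) -> usize {
            4 + self.key.len() + 1
        }
    }

    fn main() {
        let op = Op { key: "account-hash-2c4a".to_string(), kind: 1 };
        let bytes = op.to_bytes();
        assert_eq!(bytes.len(), op.serialized_length());
        println!("{} bytes, as promised", bytes.len());
    }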
-#[derive(Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum OpKind { - /// A read operation. - Read, - /// A write operation. - Write, - /// An addition. - Add, - /// An operation which has no effect. - NoOp, - /// A prune operation. - Prune, -} - -impl OpKind { - fn tag(&self) -> OpTag { - match self { - OpKind::Read => OpTag::Read, - OpKind::Write => OpTag::Write, - OpKind::Add => OpTag::Add, - OpKind::NoOp => OpTag::NoOp, - OpKind::Prune => OpTag::Prune, - } - } -} - -impl ToBytes for OpKind { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - let tag_byte = self.tag().to_u8().ok_or(bytesrepr::Error::Formatting)?; - tag_byte.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for OpKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match TryFrom::try_from(tag)? { - OpTag::Read => Ok((OpKind::Read, remainder)), - OpTag::Write => Ok((OpKind::Write, remainder)), - OpTag::Add => Ok((OpKind::Add, remainder)), - OpTag::NoOp => Ok((OpKind::NoOp, remainder)), - OpTag::Prune => Ok((OpKind::Prune, remainder)), - } - } -} - -/// A transformation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct TransformEntry { - /// The formatted string of the `Key`. - pub key: String, - /// The transformation. - pub transform: Transform, -} - -impl ToBytes for TransformEntry { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.key.write_bytes(writer)?; - self.transform.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.key.serialized_length() + self.transform.serialized_length() - } -} - -impl FromBytes for TransformEntry { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (key, remainder) = String::from_bytes(bytes)?; - let (transform, remainder) = Transform::from_bytes(remainder)?; - let transform_entry = TransformEntry { key, transform }; - Ok((transform_entry, remainder)) - } -} - -/// The actual transformation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "TransformV1"))] -#[serde(deny_unknown_fields)] -pub enum Transform { - /// A transform having no effect. - Identity, - /// Writes the given CLValue to global state. - WriteCLValue(CLValue), - /// Writes the given Account to global state. - WriteAccount(AccountHash), - /// Writes a smart contract as Wasm to global state. - WriteContractWasm, - /// Writes a smart contract to global state. - WriteContract, - /// Writes a smart contract package to global state. 
- WriteContractPackage, - /// Writes the given DeployInfo to global state. - WriteDeployInfo(DeployInfo), - /// Writes the given EraInfo to global state. - WriteEraInfo(EraInfo), - /// Writes the given Transfer to global state. - WriteTransfer(Transfer), - /// Writes the given Bid to global state. - WriteBid(Box), - /// Writes the given Withdraw to global state. - WriteWithdraw(Vec), - /// Adds the given `i32`. - AddInt32(i32), - /// Adds the given `u64`. - AddUInt64(u64), - /// Adds the given `U128`. - AddUInt128(U128), - /// Adds the given `U256`. - AddUInt256(U256), - /// Adds the given `U512`. - AddUInt512(U512), - /// Adds the given collection of named keys. - AddKeys(Vec), - /// A failed transformation, containing an error message. - Failure(String), - /// Writes the given Unbonding to global state. - WriteUnbonding(Vec), - /// Writes the addressable entity to global state. - WriteAddressableEntity, - /// Removes pathing to keyed value within global state. This is a form of soft delete; the - /// underlying value remains in global state and is reachable from older global state root - /// hashes where it was included in the hash up. - Prune(Key), - /// Writes the given BidKind to global state. - WriteBidKind(BidKind), -} - -impl ToBytes for Transform { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - Transform::Identity => (TransformTag::Identity as u8).write_bytes(writer), - Transform::WriteCLValue(value) => { - (TransformTag::WriteCLValue as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::WriteAccount(account_hash) => { - (TransformTag::WriteAccount as u8).write_bytes(writer)?; - account_hash.write_bytes(writer) - } - Transform::WriteContractWasm => (TransformTag::WriteByteCode as u8).write_bytes(writer), - Transform::WriteContract => (TransformTag::WriteContract as u8).write_bytes(writer), - Transform::WriteContractPackage => { - (TransformTag::WritePackage as u8).write_bytes(writer) - } - Transform::WriteDeployInfo(deploy_info) => { - (TransformTag::WriteDeployInfo as u8).write_bytes(writer)?; - deploy_info.write_bytes(writer) - } - Transform::WriteEraInfo(era_info) => { - (TransformTag::WriteEraInfo as u8).write_bytes(writer)?; - era_info.write_bytes(writer) - } - Transform::WriteTransfer(transfer) => { - (TransformTag::WriteTransfer as u8).write_bytes(writer)?; - transfer.write_bytes(writer) - } - Transform::WriteBid(bid) => { - (TransformTag::WriteBid as u8).write_bytes(writer)?; - bid.write_bytes(writer) - } - Transform::WriteWithdraw(unbonding_purses) => { - (TransformTag::WriteWithdraw as u8).write_bytes(writer)?; - unbonding_purses.write_bytes(writer) - } - Transform::AddInt32(value) => { - (TransformTag::AddInt32 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::AddUInt64(value) => { - (TransformTag::AddUInt64 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::AddUInt128(value) => { - (TransformTag::AddUInt128 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::AddUInt256(value) => { - (TransformTag::AddUInt256 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::AddUInt512(value) => { - (TransformTag::AddUInt512 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::AddKeys(value) => { - (TransformTag::AddKeys as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::Failure(value) => { - (TransformTag::Failure as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - 
Transform::WriteUnbonding(value) => { - (TransformTag::WriteUnbonding as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::WriteAddressableEntity => { - (TransformTag::WriteAddressableEntity as u8).write_bytes(writer) - } - Transform::Prune(value) => { - (TransformTag::Prune as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - Transform::WriteBidKind(value) => { - (TransformTag::WriteBidKind as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - let body_len = match self { - Transform::Prune(key) => key.serialized_length(), - Transform::WriteCLValue(value) => value.serialized_length(), - Transform::WriteAccount(value) => value.serialized_length(), - Transform::WriteDeployInfo(value) => value.serialized_length(), - Transform::WriteEraInfo(value) => value.serialized_length(), - Transform::WriteTransfer(value) => value.serialized_length(), - Transform::AddInt32(value) => value.serialized_length(), - Transform::AddUInt64(value) => value.serialized_length(), - Transform::AddUInt128(value) => value.serialized_length(), - Transform::AddUInt256(value) => value.serialized_length(), - Transform::AddUInt512(value) => value.serialized_length(), - Transform::AddKeys(value) => value.serialized_length(), - Transform::Failure(value) => value.serialized_length(), - Transform::Identity - | Transform::WriteContractWasm - | Transform::WriteContract - | Transform::WriteContractPackage - | Transform::WriteAddressableEntity => 0, - Transform::WriteBid(value) => value.serialized_length(), - Transform::WriteBidKind(value) => value.serialized_length(), - Transform::WriteWithdraw(value) => value.serialized_length(), - Transform::WriteUnbonding(value) => value.serialized_length(), - }; - U8_SERIALIZED_LENGTH + body_len - } -} - -impl FromBytes for Transform { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match TryFrom::try_from(tag)? 
{ - TransformTag::Identity => Ok((Transform::Identity, remainder)), - TransformTag::WriteCLValue => { - let (cl_value, remainder) = CLValue::from_bytes(remainder)?; - Ok((Transform::WriteCLValue(cl_value), remainder)) - } - TransformTag::WriteAccount => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((Transform::WriteAccount(account_hash), remainder)) - } - TransformTag::WriteByteCode => Ok((Transform::WriteContractWasm, remainder)), - TransformTag::WriteContract => Ok((Transform::WriteContract, remainder)), - TransformTag::WritePackage => Ok((Transform::WriteContractPackage, remainder)), - TransformTag::WriteDeployInfo => { - let (deploy_info, remainder) = DeployInfo::from_bytes(remainder)?; - Ok((Transform::WriteDeployInfo(deploy_info), remainder)) - } - TransformTag::WriteEraInfo => { - let (era_info, remainder) = EraInfo::from_bytes(remainder)?; - Ok((Transform::WriteEraInfo(era_info), remainder)) - } - TransformTag::WriteTransfer => { - let (transfer, remainder) = Transfer::from_bytes(remainder)?; - Ok((Transform::WriteTransfer(transfer), remainder)) - } - TransformTag::AddInt32 => { - let (value_i32, remainder) = i32::from_bytes(remainder)?; - Ok((Transform::AddInt32(value_i32), remainder)) - } - TransformTag::AddUInt64 => { - let (value_u64, remainder) = u64::from_bytes(remainder)?; - Ok((Transform::AddUInt64(value_u64), remainder)) - } - TransformTag::AddUInt128 => { - let (value_u128, remainder) = U128::from_bytes(remainder)?; - Ok((Transform::AddUInt128(value_u128), remainder)) - } - TransformTag::AddUInt256 => { - let (value_u256, remainder) = U256::from_bytes(remainder)?; - Ok((Transform::AddUInt256(value_u256), remainder)) - } - TransformTag::AddUInt512 => { - let (value_u512, remainder) = U512::from_bytes(remainder)?; - Ok((Transform::AddUInt512(value_u512), remainder)) - } - TransformTag::AddKeys => { - let (value, remainder) = Vec::::from_bytes(remainder)?; - Ok((Transform::AddKeys(value), remainder)) - } - TransformTag::Failure => { - let (value, remainder) = String::from_bytes(remainder)?; - Ok((Transform::Failure(value), remainder)) - } - TransformTag::WriteBid => { - let (bid, remainder) = Bid::from_bytes(remainder)?; - Ok((Transform::WriteBid(Box::new(bid)), remainder)) - } - TransformTag::WriteWithdraw => { - let (withdraw_purses, remainder) = - as FromBytes>::from_bytes(remainder)?; - Ok((Transform::WriteWithdraw(withdraw_purses), remainder)) - } - TransformTag::WriteUnbonding => { - let (unbonding_purses, remainder) = - as FromBytes>::from_bytes(remainder)?; - Ok((Transform::WriteUnbonding(unbonding_purses), remainder)) - } - TransformTag::WriteAddressableEntity => { - Ok((Transform::WriteAddressableEntity, remainder)) - } - TransformTag::Prune => { - let (key, remainder) = Key::from_bytes(remainder)?; - Ok((Transform::Prune(key), remainder)) - } - TransformTag::WriteBidKind => { - let (value, remainder) = BidKind::from_bytes(remainder)?; - Ok((Transform::WriteBidKind(value), remainder)) - } - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> Transform { - // TODO - include WriteDeployInfo and WriteTransfer as options - match rng.gen_range(0..13) { - 0 => Transform::Identity, - 1 => Transform::WriteCLValue(CLValue::from_t(true).unwrap()), - 2 => Transform::WriteAccount(AccountHash::new(rng.gen())), - 3 => Transform::WriteContractWasm, - 4 => Transform::WriteContract, - 5 => Transform::WriteContractPackage, - 6 => Transform::AddInt32(rng.gen()), - 7 => 
Transform::AddUInt64(rng.gen()), - 8 => Transform::AddUInt128(rng.gen::().into()), - 9 => Transform::AddUInt256(rng.gen::().into()), - 10 => Transform::AddUInt512(rng.gen::().into()), - 11 => { - let mut named_keys = Vec::new(); - for _ in 0..rng.gen_range(1..6) { - named_keys.push(NamedKey { - name: rng.gen::().to_string(), - key: rng.gen::().to_string(), - }); - } - Transform::AddKeys(named_keys) - } - 12 => Transform::Failure(rng.gen::().to_string()), - 13 => Transform::WriteAddressableEntity, - _ => unreachable!(), - } - } -} - -/// A key with a name. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Default, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct NamedKey { - /// The name of the entry. - pub name: String, - /// The value of the entry: a casper `Key` type. - #[cfg_attr(feature = "json-schema", schemars(with = "Key"))] - pub key: String, -} - -impl ToBytes for NamedKey { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.name.write_bytes(writer)?; - self.key.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.name.serialized_length() + self.key.serialized_length() - } -} - -impl FromBytes for NamedKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (name, remainder) = String::from_bytes(bytes)?; - let (key, remainder) = String::from_bytes(remainder)?; - let named_key = NamedKey { name, key }; - Ok((named_key, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_test_transform() { - let mut rng = TestRng::new(); - let transform: Transform = rng.gen(); - bytesrepr::test_serialization_roundtrip(&transform); - } - - #[test] - fn bytesrepr_test_execution_result() { - let mut rng = TestRng::new(); - let execution_result: ExecutionResultV1 = rng.gen(); - bytesrepr::test_serialization_roundtrip(&execution_result); - } -} diff --git a/casper_types_ver_2_0/src/execution/execution_result_v2.rs b/casper_types_ver_2_0/src/execution/execution_result_v2.rs deleted file mode 100644 index 9470c133..00000000 --- a/casper_types_ver_2_0/src/execution/execution_result_v2.rs +++ /dev/null @@ -1,259 +0,0 @@ -//! This file provides types to allow conversion from an EE `ExecutionResult` into a similar type -//! which can be serialized to a valid binary or JSON representation. -//! -//! It is stored as metadata related to a given deploy, and made available to clients via the -//! JSON-RPC API. 
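For the JSON side mentioned in this module doc, the serde derives rely on the default externally tagged enum representation, with the outer version enum's variants renamed to Version1/Version2 (see the #[serde(rename = ...)] attributes on ExecutionResult earlier in this diff). A sketch of the resulting JSON shape; it needs the serde (with derive) and serde_json crates, and VersionedResult is an illustrative stand-in, not the real type:

    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize, Debug, PartialEq)]
    enum VersionedResult {
        #[serde(rename = "Version1")]
        V1 { cost: u64 },
        #[serde(rename = "Version2")]
        V2 { cost: u64, error_message: Option<String> },
    }

    fn main() {
        let result = VersionedResult::V2 { cost: 123_456, error_message: None };
        let json = serde_json::to_string(&result).unwrap();
        // Externally tagged: the variant name wraps the variant's fields.
        assert_eq!(json, r#"{"Version2":{"cost":123456,"error_message":null}}"#);
        let back: VersionedResult = serde_json::from_str(&json).unwrap();
        assert_eq!(back, result);
    }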
- -#[cfg(any(feature = "testing", test))] -use alloc::format; -use alloc::{string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(feature = "testing", test))] -use rand::{distributions::Standard, prelude::Distribution, Rng}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::Effects; -#[cfg(feature = "json-schema")] -use super::{Transform, TransformKind}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, RESULT_ERR_TAG, RESULT_OK_TAG, U8_SERIALIZED_LENGTH}, - TransferAddr, U512, -}; -#[cfg(feature = "json-schema")] -use crate::{Key, KEY_HASH_LENGTH}; - -#[cfg(feature = "json-schema")] -static EXECUTION_RESULT: Lazy = Lazy::new(|| { - let key1 = Key::from_formatted_str( - "account-hash-2c4a11c062a8a337bfc97e27fd66291caeb2c65865dcb5d3ef3759c4c97efecb", - ) - .unwrap(); - let key2 = Key::from_formatted_str( - "deploy-af684263911154d26fa05be9963171802801a0b6aff8f199b7391eacb8edc9e1", - ) - .unwrap(); - let mut effects = Effects::new(); - effects.push(Transform::new(key1, TransformKind::AddUInt64(8u64))); - effects.push(Transform::new(key2, TransformKind::Identity)); - - let transfers = vec![ - TransferAddr::new([89; KEY_HASH_LENGTH]), - TransferAddr::new([130; KEY_HASH_LENGTH]), - ]; - - ExecutionResultV2::Success { - effects, - transfers, - cost: U512::from(123_456), - } -}); - -/// The result of executing a single deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum ExecutionResultV2 { - /// The result of a failed execution. - Failure { - /// The effects of executing the deploy. - effects: Effects, - /// A record of transfers performed while executing the deploy. - transfers: Vec, - /// The cost in Motes of executing the deploy. - cost: U512, - /// The error message associated with executing the deploy. - error_message: String, - }, - /// The result of a successful execution. - Success { - /// The effects of executing the deploy. - effects: Effects, - /// A record of transfers performed while executing the deploy. - transfers: Vec, - /// The cost in Motes of executing the deploy. - cost: U512, - }, -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ExecutionResultV2 { - let transfer_count = rng.gen_range(0..6); - let mut transfers = Vec::new(); - for _ in 0..transfer_count { - transfers.push(TransferAddr::new(rng.gen())) - } - - let effects = Effects::random(rng); - - if rng.gen() { - ExecutionResultV2::Failure { - effects, - transfers, - cost: rng.gen::().into(), - error_message: format!("Error message {}", rng.gen::()), - } - } else { - ExecutionResultV2::Success { - effects, - transfers, - cost: rng.gen::().into(), - } - } - } -} - -impl ExecutionResultV2 { - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &EXECUTION_RESULT - } - - /// Returns a random `ExecutionResultV2`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let effects = Effects::random(rng); - - let transfer_count = rng.gen_range(0..6); - let mut transfers = vec![]; - for _ in 0..transfer_count { - transfers.push(TransferAddr::new(rng.gen())) - } - - let cost = U512::from(rng.gen::()); - - if rng.gen() { - ExecutionResultV2::Failure { - effects, - transfers, - cost, - error_message: format!("Error message {}", rng.gen::()), - } - } else { - ExecutionResultV2::Success { - effects, - transfers, - cost, - } - } - } -} - -impl ToBytes for ExecutionResultV2 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ExecutionResultV2::Failure { - effects, - transfers, - cost, - error_message, - } => { - RESULT_ERR_TAG.write_bytes(writer)?; - effects.write_bytes(writer)?; - transfers.write_bytes(writer)?; - cost.write_bytes(writer)?; - error_message.write_bytes(writer) - } - ExecutionResultV2::Success { - effects, - transfers, - cost, - } => { - RESULT_OK_TAG.write_bytes(writer)?; - effects.write_bytes(writer)?; - transfers.write_bytes(writer)?; - cost.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - ExecutionResultV2::Failure { - effects, - transfers, - cost, - error_message, - } => { - effects.serialized_length() - + transfers.serialized_length() - + cost.serialized_length() - + error_message.serialized_length() - } - ExecutionResultV2::Success { - effects, - transfers, - cost, - } => { - effects.serialized_length() - + transfers.serialized_length() - + cost.serialized_length() - } - } - } -} - -impl FromBytes for ExecutionResultV2 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - RESULT_ERR_TAG => { - let (effects, remainder) = Effects::from_bytes(remainder)?; - let (transfers, remainder) = Vec::::from_bytes(remainder)?; - let (cost, remainder) = U512::from_bytes(remainder)?; - let (error_message, remainder) = String::from_bytes(remainder)?; - let execution_result = ExecutionResultV2::Failure { - effects, - transfers, - cost, - error_message, - }; - Ok((execution_result, remainder)) - } - RESULT_OK_TAG => { - let (effects, remainder) = Effects::from_bytes(remainder)?; - let (transfers, remainder) = Vec::::from_bytes(remainder)?; - let (cost, remainder) = U512::from_bytes(remainder)?; - let execution_result = ExecutionResultV2::Success { - effects, - transfers, - cost, - }; - Ok((execution_result, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - let execution_result = ExecutionResultV2::random(rng); - bytesrepr::test_serialization_roundtrip(&execution_result); - } - } -} diff --git a/casper_types_ver_2_0/src/execution/transform.rs b/casper_types_ver_2_0/src/execution/transform.rs deleted file mode 100644 index c0fd9f98..00000000 --- a/casper_types_ver_2_0/src/execution/transform.rs +++ /dev/null @@ -1,75 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::TransformKind; -use crate::{ - bytesrepr::{self, 
FromBytes, ToBytes}, - Key, -}; - -/// A transformation performed while executing a deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "json-schema", schemars(rename = "TransformV2"))] -#[serde(deny_unknown_fields)] -pub struct Transform { - key: Key, - kind: TransformKind, -} - -impl Transform { - /// Constructs a new `Transform`. - pub fn new(key: Key, kind: TransformKind) -> Self { - Transform { key, kind } - } - - /// Returns the key whose value was transformed. - pub fn key(&self) -> &Key { - &self.key - } - - /// Returns the transformation kind. - pub fn kind(&self) -> &TransformKind { - &self.kind - } - - /// Consumes `self`, returning its constituent parts. - pub fn destructure(self) -> (Key, TransformKind) { - (self.key, self.kind) - } -} - -impl ToBytes for Transform { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.key.write_bytes(writer)?; - self.kind.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.key.serialized_length() + self.kind.serialized_length() - } -} - -impl FromBytes for Transform { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (key, remainder) = Key::from_bytes(bytes)?; - let (transform, remainder) = TransformKind::from_bytes(remainder)?; - let transform_entry = Transform { - key, - kind: transform, - }; - Ok((transform_entry, remainder)) - } -} diff --git a/casper_types_ver_2_0/src/execution/transform_error.rs b/casper_types_ver_2_0/src/execution/transform_error.rs deleted file mode 100644 index 7936b8fa..00000000 --- a/casper_types_ver_2_0/src/execution/transform_error.rs +++ /dev/null @@ -1,136 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLValueError, StoredValueTypeMismatch, -}; - -/// Error type for applying and combining transforms. -/// -/// A `TypeMismatch` occurs when a transform cannot be applied because the types are not compatible -/// (e.g. trying to add a number to a string). -#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[non_exhaustive] -pub enum TransformError { - /// Error while (de)serializing data. - Serialization(bytesrepr::Error), - /// Type mismatch error. - TypeMismatch(StoredValueTypeMismatch), - /// Type no longer supported. 
- Deprecated, -} - -impl Display for TransformError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransformError::Serialization(error) => { - write!(formatter, "{}", error) - } - TransformError::TypeMismatch(error) => { - write!(formatter, "{}", error) - } - TransformError::Deprecated => { - write!(formatter, "type no longer supported") - } - } - } -} - -impl From for TransformError { - fn from(error: StoredValueTypeMismatch) -> Self { - TransformError::TypeMismatch(error) - } -} - -impl From for TransformError { - fn from(cl_value_error: CLValueError) -> TransformError { - match cl_value_error { - CLValueError::Serialization(error) => TransformError::Serialization(error), - CLValueError::Type(cl_type_mismatch) => { - let expected = format!("{:?}", cl_type_mismatch.expected); - let found = format!("{:?}", cl_type_mismatch.found); - let type_mismatch = StoredValueTypeMismatch::new(expected, found); - TransformError::TypeMismatch(type_mismatch) - } - } - } -} - -impl ToBytes for TransformError { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransformError::Serialization(error) => { - (TransformErrorTag::Serialization as u8).write_bytes(writer)?; - error.write_bytes(writer) - } - TransformError::TypeMismatch(error) => { - (TransformErrorTag::TypeMismatch as u8).write_bytes(writer)?; - error.write_bytes(writer) - } - TransformError::Deprecated => (TransformErrorTag::Deprecated as u8).write_bytes(writer), - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransformError::Serialization(error) => error.serialized_length(), - TransformError::TypeMismatch(error) => error.serialized_length(), - TransformError::Deprecated => 0, - } - } -} - -impl FromBytes for TransformError { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == TransformErrorTag::Serialization as u8 => { - let (error, remainder) = bytesrepr::Error::from_bytes(remainder)?; - Ok((TransformError::Serialization(error), remainder)) - } - tag if tag == TransformErrorTag::TypeMismatch as u8 => { - let (error, remainder) = StoredValueTypeMismatch::from_bytes(remainder)?; - Ok((TransformError::TypeMismatch(error), remainder)) - } - tag if tag == TransformErrorTag::Deprecated as u8 => { - Ok((TransformError::Deprecated, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(feature = "std")] -impl StdError for TransformError { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - TransformError::Serialization(error) => Some(error), - TransformError::TypeMismatch(_) | TransformError::Deprecated => None, - } - } -} - -#[repr(u8)] -enum TransformErrorTag { - Serialization = 0, - TypeMismatch = 1, - Deprecated = 2, -} diff --git a/casper_types_ver_2_0/src/execution/transform_kind.rs b/casper_types_ver_2_0/src/execution/transform_kind.rs deleted file mode 100644 index 0c0f6ee4..00000000 --- a/casper_types_ver_2_0/src/execution/transform_kind.rs +++ /dev/null @@ -1,847 +0,0 @@ -use alloc::{string::ToString, vec::Vec}; -use core::{any, convert::TryFrom}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::traits::{AsPrimitive, WrappingAdd}; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = 
"json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::TransformError; -use crate::{ - addressable_entity::NamedKeys, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, CLValue, Key, StoredValue, StoredValueTypeMismatch, U128, U256, U512, -}; - -/// Taxonomy of Transform. -#[derive(PartialEq, Eq, Debug, Clone)] -pub enum TransformInstruction { - /// Store a StoredValue. - Store(StoredValue), - /// Prune a StoredValue by Key. - Prune(Key), -} - -impl TransformInstruction { - /// Store instruction. - pub fn store(stored_value: StoredValue) -> Self { - Self::Store(stored_value) - } - - /// Prune instruction. - pub fn prune(key: Key) -> Self { - Self::Prune(key) - } -} - -impl From for TransformInstruction { - fn from(value: StoredValue) -> Self { - TransformInstruction::Store(value) - } -} - -/// Representation of a single transformation occurring during execution. -/// -/// Note that all arithmetic variants of [`TransformKind`] are commutative which means that a given -/// collection of them can be executed in any order to produce the same end result. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum TransformKind { - /// An identity transformation that does not modify a value in the global state. - /// - /// Created as a result of reading from the global state. - Identity, - /// Writes a new value in the global state. - Write(StoredValue), - /// A wrapping addition of an `i32` to an existing numeric value (not necessarily an `i32`) in - /// the global state. - AddInt32(i32), - /// A wrapping addition of a `u64` to an existing numeric value (not necessarily an `u64`) in - /// the global state. - AddUInt64(u64), - /// A wrapping addition of a `U128` to an existing numeric value (not necessarily an `U128`) in - /// the global state. - AddUInt128(U128), - /// A wrapping addition of a `U256` to an existing numeric value (not necessarily an `U256`) in - /// the global state. - AddUInt256(U256), - /// A wrapping addition of a `U512` to an existing numeric value (not necessarily an `U512`) in - /// the global state. - AddUInt512(U512), - /// Adds new named keys to an existing entry in the global state. - /// - /// This transform assumes that the existing stored value is either an Account or a Contract. - AddKeys(NamedKeys), - /// Removes the pathing to the global state entry of the specified key. The pruned element - /// remains reachable from previously generated global state root hashes, but will not be - /// included in the next generated global state root hash and subsequent state accumulated - /// from it. - Prune(Key), - /// Represents the case where applying a transform would cause an error. - Failure(TransformError), -} - -impl TransformKind { - /// Applies the transformation on a specified stored value instance. - /// - /// This method produces a new `StoredValue` instance based on the `TransformKind` variant. 
- pub fn apply(self, stored_value: StoredValue) -> Result { - fn store(sv: StoredValue) -> TransformInstruction { - TransformInstruction::Store(sv) - } - match self { - TransformKind::Identity => Ok(store(stored_value)), - TransformKind::Write(new_value) => Ok(store(new_value)), - TransformKind::Prune(key) => Ok(TransformInstruction::prune(key)), - TransformKind::AddInt32(to_add) => wrapping_addition(stored_value, to_add), - TransformKind::AddUInt64(to_add) => wrapping_addition(stored_value, to_add), - TransformKind::AddUInt128(to_add) => wrapping_addition(stored_value, to_add), - TransformKind::AddUInt256(to_add) => wrapping_addition(stored_value, to_add), - TransformKind::AddUInt512(to_add) => wrapping_addition(stored_value, to_add), - TransformKind::AddKeys(keys) => match stored_value { - StoredValue::AddressableEntity(mut entity) => { - entity.named_keys_append(keys); - Ok(store(StoredValue::AddressableEntity(entity))) - } - StoredValue::Account(_) | StoredValue::Contract(_) => { - Err(TransformError::Deprecated) - } - StoredValue::CLValue(cl_value) => { - let expected = "Contract or Account".to_string(); - let found = format!("{:?}", cl_value.cl_type()); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Package(_) => { - let expected = "Contract or Account".to_string(); - let found = "ContractPackage".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::ByteCode(_) => { - let expected = "Contract or Account".to_string(); - let found = "ByteCode".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Transfer(_) => { - let expected = "Contract or Account".to_string(); - let found = "Transfer".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::DeployInfo(_) => { - let expected = "Contract or Account".to_string(); - let found = "DeployInfo".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::EraInfo(_) => { - let expected = "Contract or Account".to_string(); - let found = "EraInfo".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Bid(_) => { - let expected = "Contract or Account".to_string(); - let found = "Bid".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::BidKind(_) => { - let expected = "Contract or Account".to_string(); - let found = "BidKind".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Withdraw(_) => { - let expected = "Contract or Account".to_string(); - let found = "Withdraw".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Unbonding(_) => { - let expected = "Contract or Account".to_string(); - let found = "Unbonding".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::ContractWasm(_) => { - let expected = "Contract or Account".to_string(); - let found = "ContractWasm".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::ContractPackage(_) => { - let expected = "Contract or Account".to_string(); - let found = "ContractPackage".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::MessageTopic(_) => { - let expected = "Contract or Account".to_string(); - let found = "MessageTopic".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - StoredValue::Message(_) => { - let expected = 
"Contract or Account".to_string(); - let found = "Message".to_string(); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - }, - TransformKind::Failure(error) => Err(error), - } - } - - /// Returns a random `TransformKind`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut R) -> Self { - match rng.gen_range(0..10) { - 0 => TransformKind::Identity, - 1 => TransformKind::Write(StoredValue::CLValue(CLValue::from_t(true).unwrap())), - 2 => TransformKind::AddInt32(rng.gen()), - 3 => TransformKind::AddUInt64(rng.gen()), - 4 => TransformKind::AddUInt128(rng.gen::().into()), - 5 => TransformKind::AddUInt256(rng.gen::().into()), - 6 => TransformKind::AddUInt512(rng.gen::().into()), - 7 => { - let mut named_keys = NamedKeys::new(); - for _ in 0..rng.gen_range(1..6) { - named_keys.insert(rng.gen::().to_string(), rng.gen()); - } - TransformKind::AddKeys(named_keys) - } - 8 => TransformKind::Failure(TransformError::Serialization( - bytesrepr::Error::EarlyEndOfStream, - )), - 9 => TransformKind::Prune(rng.gen::()), - _ => unreachable!(), - } - } -} - -impl ToBytes for TransformKind { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransformKind::Identity => (TransformTag::Identity as u8).write_bytes(writer), - TransformKind::Write(stored_value) => { - (TransformTag::Write as u8).write_bytes(writer)?; - stored_value.write_bytes(writer) - } - TransformKind::AddInt32(value) => { - (TransformTag::AddInt32 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - TransformKind::AddUInt64(value) => { - (TransformTag::AddUInt64 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - TransformKind::AddUInt128(value) => { - (TransformTag::AddUInt128 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - TransformKind::AddUInt256(value) => { - (TransformTag::AddUInt256 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - TransformKind::AddUInt512(value) => { - (TransformTag::AddUInt512 as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - TransformKind::AddKeys(named_keys) => { - (TransformTag::AddKeys as u8).write_bytes(writer)?; - named_keys.write_bytes(writer) - } - TransformKind::Failure(error) => { - (TransformTag::Failure as u8).write_bytes(writer)?; - error.write_bytes(writer) - } - TransformKind::Prune(value) => { - (TransformTag::Prune as u8).write_bytes(writer)?; - value.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransformKind::Identity => 0, - TransformKind::Write(stored_value) => stored_value.serialized_length(), - TransformKind::AddInt32(value) => value.serialized_length(), - TransformKind::AddUInt64(value) => value.serialized_length(), - TransformKind::AddUInt128(value) => value.serialized_length(), - TransformKind::AddUInt256(value) => value.serialized_length(), - TransformKind::AddUInt512(value) => value.serialized_length(), - TransformKind::AddKeys(named_keys) => named_keys.serialized_length(), - TransformKind::Failure(error) => error.serialized_length(), - TransformKind::Prune(value) => value.serialized_length(), - } - } -} - -impl FromBytes for TransformKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == TransformTag::Identity as u8 => 
Ok((TransformKind::Identity, remainder)), - tag if tag == TransformTag::Write as u8 => { - let (stored_value, remainder) = StoredValue::from_bytes(remainder)?; - Ok((TransformKind::Write(stored_value), remainder)) - } - tag if tag == TransformTag::AddInt32 as u8 => { - let (value, remainder) = i32::from_bytes(remainder)?; - Ok((TransformKind::AddInt32(value), remainder)) - } - tag if tag == TransformTag::AddUInt64 as u8 => { - let (value, remainder) = u64::from_bytes(remainder)?; - Ok((TransformKind::AddUInt64(value), remainder)) - } - tag if tag == TransformTag::AddUInt128 as u8 => { - let (value, remainder) = U128::from_bytes(remainder)?; - Ok((TransformKind::AddUInt128(value), remainder)) - } - tag if tag == TransformTag::AddUInt256 as u8 => { - let (value, remainder) = U256::from_bytes(remainder)?; - Ok((TransformKind::AddUInt256(value), remainder)) - } - tag if tag == TransformTag::AddUInt512 as u8 => { - let (value, remainder) = U512::from_bytes(remainder)?; - Ok((TransformKind::AddUInt512(value), remainder)) - } - tag if tag == TransformTag::AddKeys as u8 => { - let (named_keys, remainder) = NamedKeys::from_bytes(remainder)?; - Ok((TransformKind::AddKeys(named_keys), remainder)) - } - tag if tag == TransformTag::Failure as u8 => { - let (error, remainder) = TransformError::from_bytes(remainder)?; - Ok((TransformKind::Failure(error), remainder)) - } - tag if tag == TransformTag::Prune as u8 => { - let (key, remainder) = Key::from_bytes(remainder)?; - Ok((TransformKind::Prune(key), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -/// Attempts a wrapping addition of `to_add` to `stored_value`, assuming `stored_value` is -/// compatible with type `Y`. -fn wrapping_addition( - stored_value: StoredValue, - to_add: Y, -) -> Result -where - Y: AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive, -{ - let cl_value = CLValue::try_from(stored_value)?; - - match cl_value.cl_type() { - CLType::I32 => do_wrapping_addition::(cl_value, to_add), - CLType::I64 => do_wrapping_addition::(cl_value, to_add), - CLType::U8 => do_wrapping_addition::(cl_value, to_add), - CLType::U32 => do_wrapping_addition::(cl_value, to_add), - CLType::U64 => do_wrapping_addition::(cl_value, to_add), - CLType::U128 => do_wrapping_addition::(cl_value, to_add), - CLType::U256 => do_wrapping_addition::(cl_value, to_add), - CLType::U512 => do_wrapping_addition::(cl_value, to_add), - other => { - let expected = format!("integral type compatible with {}", any::type_name::()); - let found = format!("{:?}", other); - Err(StoredValueTypeMismatch::new(expected, found).into()) - } - } -} - -/// Attempts a wrapping addition of `to_add` to the value represented by `cl_value`. 
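Both `TransformError` and `TransformKind` use the same wire layout: a leading `u8` tag selects the variant, the payload (if any) follows, and an unknown tag is rejected as a formatting error. A self-contained sketch of that layout with a hypothetical two-variant `Kind` enum:

    #[derive(Debug, PartialEq)]
    enum Kind {
        Identity,      // tag 0, no payload
        AddInt32(i32), // tag 2, four-byte payload
    }

    fn encode(kind: &Kind, out: &mut Vec<u8>) {
        match kind {
            Kind::Identity => out.push(0),
            Kind::AddInt32(v) => {
                out.push(2);
                out.extend_from_slice(&v.to_le_bytes());
            }
        }
    }

    fn decode(bytes: &[u8]) -> Result<(Kind, &[u8]), &'static str> {
        let (&tag, rest) = bytes.split_first().ok_or("early end of stream")?;
        match tag {
            0 => Ok((Kind::Identity, rest)),
            2 => {
                if rest.len() < 4 {
                    return Err("early end of stream");
                }
                let v = i32::from_le_bytes(rest[0..4].try_into().unwrap());
                Ok((Kind::AddInt32(v), &rest[4..]))
            }
            _ => Err("formatting"), // unknown tag, like bytesrepr::Error::Formatting
        }
    }

    fn main() {
        let mut buf = Vec::new();
        encode(&Kind::AddInt32(-5), &mut buf);
        let (decoded, rest) = decode(&buf).expect("should decode");
        assert_eq!(decoded, Kind::AddInt32(-5));
        assert!(rest.is_empty());
        assert!(decode(&[9]).is_err()); // unknown tag is rejected
    }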
-fn do_wrapping_addition( - cl_value: CLValue, - to_add: Y, -) -> Result -where - X: WrappingAdd + CLTyped + ToBytes + FromBytes + Copy + 'static, - Y: AsPrimitive, -{ - let x: X = cl_value.into_t()?; - let result = x.wrapping_add(&(to_add.as_())); - let stored_value = StoredValue::CLValue(CLValue::from_t(result)?); - Ok(TransformInstruction::store(stored_value)) -} - -#[derive(Debug)] -#[repr(u8)] -enum TransformTag { - Identity = 0, - Write = 1, - AddInt32 = 2, - AddUInt64 = 3, - AddUInt128 = 4, - AddUInt256 = 5, - AddUInt512 = 6, - AddKeys = 7, - Failure = 8, - Prune = 9, -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeMap, fmt}; - - use num::{Bounded, Num}; - - use crate::{ - byte_code::ByteCodeKind, bytesrepr::Bytes, testing::TestRng, AccessRights, ByteCode, Key, - URef, U128, U256, U512, - }; - - use super::*; - - const ZERO_ARRAY: [u8; 32] = [0; 32]; - const TEST_STR: &str = "a"; - const TEST_BOOL: bool = true; - - const ZERO_I32: i32 = 0; - const ONE_I32: i32 = 1; - const NEG_ONE_I32: i32 = -1; - const NEG_TWO_I32: i32 = -2; - const MIN_I32: i32 = i32::min_value(); - const MAX_I32: i32 = i32::max_value(); - - const ZERO_I64: i64 = 0; - const ONE_I64: i64 = 1; - const NEG_ONE_I64: i64 = -1; - const NEG_TWO_I64: i64 = -2; - const MIN_I64: i64 = i64::min_value(); - const MAX_I64: i64 = i64::max_value(); - - const ZERO_U8: u8 = 0; - const ONE_U8: u8 = 1; - const MAX_U8: u8 = u8::max_value(); - - const ZERO_U32: u32 = 0; - const ONE_U32: u32 = 1; - const MAX_U32: u32 = u32::max_value(); - - const ZERO_U64: u64 = 0; - const ONE_U64: u64 = 1; - const MAX_U64: u64 = u64::max_value(); - - const ZERO_U128: U128 = U128([0; 2]); - const ONE_U128: U128 = U128([1, 0]); - const MAX_U128: U128 = U128([MAX_U64; 2]); - - const ZERO_U256: U256 = U256([0; 4]); - const ONE_U256: U256 = U256([1, 0, 0, 0]); - const MAX_U256: U256 = U256([MAX_U64; 4]); - - const ZERO_U512: U512 = U512([0; 8]); - const ONE_U512: U512 = U512([1, 0, 0, 0, 0, 0, 0, 0]); - const MAX_U512: U512 = U512([MAX_U64; 8]); - - #[test] - fn i32_overflow() { - let max = std::i32::MAX; - let min = std::i32::MIN; - - let max_value = StoredValue::CLValue(CLValue::from_t(max).unwrap()); - let min_value = StoredValue::CLValue(CLValue::from_t(min).unwrap()); - - let apply_overflow = TransformKind::AddInt32(1).apply(max_value.clone()); - let apply_underflow = TransformKind::AddInt32(-1).apply(min_value.clone()); - - assert_eq!( - apply_overflow.expect("Unexpected overflow"), - TransformInstruction::store(min_value) - ); - assert_eq!( - apply_underflow.expect("Unexpected underflow"), - TransformInstruction::store(max_value) - ); - } - - fn uint_overflow_test() - where - T: Num + Bounded + CLTyped + ToBytes + Into + Copy, - { - let max = T::max_value(); - let min = T::min_value(); - let one = T::one(); - let zero = T::zero(); - - let max_value = StoredValue::CLValue(CLValue::from_t(max).unwrap()); - let min_value = StoredValue::CLValue(CLValue::from_t(min).unwrap()); - let zero_value = StoredValue::CLValue(CLValue::from_t(zero).unwrap()); - - let one_transform: TransformKind = one.into(); - - let apply_overflow = TransformKind::AddInt32(1).apply(max_value.clone()); - - let apply_overflow_uint = one_transform.apply(max_value.clone()); - let apply_underflow = TransformKind::AddInt32(-1).apply(min_value); - - assert_eq!(apply_overflow, Ok(zero_value.clone().into())); - assert_eq!(apply_overflow_uint, Ok(zero_value.into())); - assert_eq!(apply_underflow, Ok(max_value.into())); - } - - #[test] - fn u128_overflow() { - impl From for 
TransformKind { - fn from(x: U128) -> Self { - TransformKind::AddUInt128(x) - } - } - uint_overflow_test::(); - } - - #[test] - fn u256_overflow() { - impl From for TransformKind { - fn from(x: U256) -> Self { - TransformKind::AddUInt256(x) - } - } - uint_overflow_test::(); - } - - #[test] - fn u512_overflow() { - impl From for TransformKind { - fn from(x: U512) -> Self { - TransformKind::AddUInt512(x) - } - } - uint_overflow_test::(); - } - - #[test] - fn addition_between_mismatched_types_should_fail() { - fn assert_yields_type_mismatch_error(stored_value: StoredValue) { - match wrapping_addition(stored_value, ZERO_I32) { - Err(TransformError::TypeMismatch(_)) => (), - _ => panic!("wrapping addition should yield TypeMismatch error"), - }; - } - - let byte_code = StoredValue::ByteCode(ByteCode::new(ByteCodeKind::V1CasperWasm, vec![])); - assert_yields_type_mismatch_error(byte_code); - - let uref = URef::new(ZERO_ARRAY, AccessRights::READ); - - let cl_bool = - StoredValue::CLValue(CLValue::from_t(TEST_BOOL).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_bool); - - let cl_unit = StoredValue::CLValue(CLValue::from_t(()).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_unit); - - let cl_string = - StoredValue::CLValue(CLValue::from_t(TEST_STR).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_string); - - let cl_key = StoredValue::CLValue( - CLValue::from_t(Key::Hash(ZERO_ARRAY)).expect("should create CLValue"), - ); - assert_yields_type_mismatch_error(cl_key); - - let cl_uref = StoredValue::CLValue(CLValue::from_t(uref).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_uref); - - let cl_option = - StoredValue::CLValue(CLValue::from_t(Some(ZERO_U8)).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_option); - - let cl_list = StoredValue::CLValue( - CLValue::from_t(Bytes::from(vec![ZERO_U8])).expect("should create CLValue"), - ); - assert_yields_type_mismatch_error(cl_list); - - let cl_fixed_list = - StoredValue::CLValue(CLValue::from_t([ZERO_U8]).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_fixed_list); - - let cl_result: Result<(), u8> = Err(ZERO_U8); - let cl_result = - StoredValue::CLValue(CLValue::from_t(cl_result).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_result); - - let cl_map = StoredValue::CLValue( - CLValue::from_t(BTreeMap::::new()).expect("should create CLValue"), - ); - assert_yields_type_mismatch_error(cl_map); - - let cl_tuple1 = - StoredValue::CLValue(CLValue::from_t((ZERO_U8,)).expect("should create CLValue")); - assert_yields_type_mismatch_error(cl_tuple1); - - let cl_tuple2 = StoredValue::CLValue( - CLValue::from_t((ZERO_U8, ZERO_U8)).expect("should create CLValue"), - ); - assert_yields_type_mismatch_error(cl_tuple2); - - let cl_tuple3 = StoredValue::CLValue( - CLValue::from_t((ZERO_U8, ZERO_U8, ZERO_U8)).expect("should create CLValue"), - ); - assert_yields_type_mismatch_error(cl_tuple3); - } - - #[test] - #[allow(clippy::cognitive_complexity)] - fn wrapping_addition_should_succeed() { - fn add(current_value: X, to_add: Y) -> X - where - X: CLTyped + ToBytes + FromBytes + PartialEq + fmt::Debug, - Y: AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive - + AsPrimitive, - { - let current = StoredValue::CLValue( - CLValue::from_t(current_value).expect("should create CLValue"), - ); - if let TransformInstruction::Store(result) = - 
wrapping_addition(current, to_add).expect("wrapping addition should succeed") - { - CLValue::try_from(result) - .expect("should be CLValue") - .into_t() - .expect("should parse to X") - } else { - panic!("expected TransformInstruction::Store"); - } - } - - // Adding to i32 - assert_eq!(ONE_I32, add(ZERO_I32, ONE_I32)); - assert_eq!(MIN_I32, add(MAX_I32, ONE_I32)); - assert_eq!(NEG_TWO_I32, add(MAX_I32, MAX_I32)); - assert_eq!(ZERO_I32, add(ONE_I32, NEG_ONE_I32)); - assert_eq!(NEG_ONE_I32, add(ZERO_I32, NEG_ONE_I32)); - assert_eq!(MAX_I32, add(NEG_ONE_I32, MIN_I32)); - - assert_eq!(ONE_I32, add(ZERO_I32, ONE_U64)); - assert_eq!(MIN_I32, add(MAX_I32, ONE_U64)); - assert_eq!(NEG_TWO_I32, add(MAX_I32, MAX_I32 as u64)); - - assert_eq!(ONE_I32, add(ZERO_I32, ONE_U128)); - assert_eq!(MIN_I32, add(MAX_I32, ONE_U128)); - assert_eq!(NEG_TWO_I32, add(MAX_I32, U128::from(MAX_I32))); - - assert_eq!(ONE_I32, add(ZERO_I32, ONE_U256)); - assert_eq!(MIN_I32, add(MAX_I32, ONE_U256)); - assert_eq!(NEG_TWO_I32, add(MAX_I32, U256::from(MAX_I32))); - - assert_eq!(ONE_I32, add(ZERO_I32, ONE_U512)); - assert_eq!(MIN_I32, add(MAX_I32, ONE_U512)); - assert_eq!(NEG_TWO_I32, add(MAX_I32, U512::from(MAX_I32))); - - // Adding to i64 - assert_eq!(ONE_I64, add(ZERO_I64, ONE_I32)); - assert_eq!(MIN_I64, add(MAX_I64, ONE_I32)); - assert_eq!(ZERO_I64, add(ONE_I64, NEG_ONE_I32)); - assert_eq!(NEG_ONE_I64, add(ZERO_I64, NEG_ONE_I32)); - assert_eq!(MAX_I64, add(MIN_I64, NEG_ONE_I32)); - - assert_eq!(ONE_I64, add(ZERO_I64, ONE_U64)); - assert_eq!(MIN_I64, add(MAX_I64, ONE_U64)); - assert_eq!(NEG_TWO_I64, add(MAX_I64, MAX_I64 as u64)); - - assert_eq!(ONE_I64, add(ZERO_I64, ONE_U128)); - assert_eq!(MIN_I64, add(MAX_I64, ONE_U128)); - assert_eq!(NEG_TWO_I64, add(MAX_I64, U128::from(MAX_I64))); - - assert_eq!(ONE_I64, add(ZERO_I64, ONE_U256)); - assert_eq!(MIN_I64, add(MAX_I64, ONE_U256)); - assert_eq!(NEG_TWO_I64, add(MAX_I64, U256::from(MAX_I64))); - - assert_eq!(ONE_I64, add(ZERO_I64, ONE_U512)); - assert_eq!(MIN_I64, add(MAX_I64, ONE_U512)); - assert_eq!(NEG_TWO_I64, add(MAX_I64, U512::from(MAX_I64))); - - // Adding to u8 - assert_eq!(ONE_U8, add(ZERO_U8, ONE_I32)); - assert_eq!(ZERO_U8, add(MAX_U8, ONE_I32)); - assert_eq!(MAX_U8, add(MAX_U8, 256_i32)); - assert_eq!(ZERO_U8, add(MAX_U8, 257_i32)); - assert_eq!(ZERO_U8, add(ONE_U8, NEG_ONE_I32)); - assert_eq!(MAX_U8, add(ZERO_U8, NEG_ONE_I32)); - assert_eq!(ZERO_U8, add(ZERO_U8, -256_i32)); - assert_eq!(MAX_U8, add(ZERO_U8, -257_i32)); - assert_eq!(MAX_U8, add(ZERO_U8, MAX_I32)); - assert_eq!(ZERO_U8, add(ZERO_U8, MIN_I32)); - - assert_eq!(ONE_U8, add(ZERO_U8, ONE_U64)); - assert_eq!(ZERO_U8, add(MAX_U8, ONE_U64)); - assert_eq!(ONE_U8, add(ZERO_U8, u64::from(MAX_U8) + 2)); - assert_eq!(MAX_U8, add(ZERO_U8, MAX_U64)); - - assert_eq!(ONE_U8, add(ZERO_U8, ONE_U128)); - assert_eq!(ZERO_U8, add(MAX_U8, ONE_U128)); - assert_eq!(ONE_U8, add(ZERO_U8, U128::from(MAX_U8) + 2)); - assert_eq!(MAX_U8, add(ZERO_U8, MAX_U128)); - - assert_eq!(ONE_U8, add(ZERO_U8, ONE_U256)); - assert_eq!(ZERO_U8, add(MAX_U8, ONE_U256)); - assert_eq!(ONE_U8, add(ZERO_U8, U256::from(MAX_U8) + 2)); - assert_eq!(MAX_U8, add(ZERO_U8, MAX_U256)); - - assert_eq!(ONE_U8, add(ZERO_U8, ONE_U512)); - assert_eq!(ZERO_U8, add(MAX_U8, ONE_U512)); - assert_eq!(ONE_U8, add(ZERO_U8, U512::from(MAX_U8) + 2)); - assert_eq!(MAX_U8, add(ZERO_U8, MAX_U512)); - - // Adding to u32 - assert_eq!(ONE_U32, add(ZERO_U32, ONE_I32)); - assert_eq!(ZERO_U32, add(MAX_U32, ONE_I32)); - assert_eq!(ZERO_U32, add(ONE_U32, NEG_ONE_I32)); - 
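The long runs of assertions above and below reduce to one rule: the operand is first converted into the stored value's own numeric type with a truncating/wrapping cast, and only then added with wrapping semantics, which is why `-1` added to an unsigned zero lands on that type's maximum. A standalone sketch of that rule, with a hypothetical `Num` enum standing in for the `CLType` dispatch:

    #[derive(Debug, PartialEq)]
    enum Num {
        U8(u8),
        U64(u64),
        I64(i64),
    }

    // Convert the operand into the stored type first, then wrapping-add.
    fn wrapping_addition(current: Num, to_add: i64) -> Num {
        match current {
            Num::U8(x) => Num::U8(x.wrapping_add(to_add as u8)),
            Num::U64(x) => Num::U64(x.wrapping_add(to_add as u64)),
            Num::I64(x) => Num::I64(x.wrapping_add(to_add)),
        }
    }

    fn main() {
        // -1 wraps to u8::MAX before the addition, as in add(ZERO_U8, NEG_ONE_I32).
        assert_eq!(wrapping_addition(Num::U8(0), -1), Num::U8(u8::MAX));
        assert_eq!(wrapping_addition(Num::U64(u64::MAX), 1), Num::U64(0));
        assert_eq!(wrapping_addition(Num::I64(5), 10), Num::I64(15));
    }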
assert_eq!(MAX_U32, add(ZERO_U32, NEG_ONE_I32)); - assert_eq!(MAX_I32 as u32 + 1, add(ZERO_U32, MIN_I32)); - - assert_eq!(ONE_U32, add(ZERO_U32, ONE_U64)); - assert_eq!(ZERO_U32, add(MAX_U32, ONE_U64)); - assert_eq!(ONE_U32, add(ZERO_U32, u64::from(MAX_U32) + 2)); - assert_eq!(MAX_U32, add(ZERO_U32, MAX_U64)); - - assert_eq!(ONE_U32, add(ZERO_U32, ONE_U128)); - assert_eq!(ZERO_U32, add(MAX_U32, ONE_U128)); - assert_eq!(ONE_U32, add(ZERO_U32, U128::from(MAX_U32) + 2)); - assert_eq!(MAX_U32, add(ZERO_U32, MAX_U128)); - - assert_eq!(ONE_U32, add(ZERO_U32, ONE_U256)); - assert_eq!(ZERO_U32, add(MAX_U32, ONE_U256)); - assert_eq!(ONE_U32, add(ZERO_U32, U256::from(MAX_U32) + 2)); - assert_eq!(MAX_U32, add(ZERO_U32, MAX_U256)); - - assert_eq!(ONE_U32, add(ZERO_U32, ONE_U512)); - assert_eq!(ZERO_U32, add(MAX_U32, ONE_U512)); - assert_eq!(ONE_U32, add(ZERO_U32, U512::from(MAX_U32) + 2)); - assert_eq!(MAX_U32, add(ZERO_U32, MAX_U512)); - - // Adding to u64 - assert_eq!(ONE_U64, add(ZERO_U64, ONE_I32)); - assert_eq!(ZERO_U64, add(MAX_U64, ONE_I32)); - assert_eq!(ZERO_U64, add(ONE_U64, NEG_ONE_I32)); - assert_eq!(MAX_U64, add(ZERO_U64, NEG_ONE_I32)); - - assert_eq!(ONE_U64, add(ZERO_U64, ONE_U64)); - assert_eq!(ZERO_U64, add(MAX_U64, ONE_U64)); - assert_eq!(MAX_U64 - 1, add(MAX_U64, MAX_U64)); - - assert_eq!(ONE_U64, add(ZERO_U64, ONE_U128)); - assert_eq!(ZERO_U64, add(MAX_U64, ONE_U128)); - assert_eq!(ONE_U64, add(ZERO_U64, U128::from(MAX_U64) + 2)); - assert_eq!(MAX_U64, add(ZERO_U64, MAX_U128)); - - assert_eq!(ONE_U64, add(ZERO_U64, ONE_U256)); - assert_eq!(ZERO_U64, add(MAX_U64, ONE_U256)); - assert_eq!(ONE_U64, add(ZERO_U64, U256::from(MAX_U64) + 2)); - assert_eq!(MAX_U64, add(ZERO_U64, MAX_U256)); - - assert_eq!(ONE_U64, add(ZERO_U64, ONE_U512)); - assert_eq!(ZERO_U64, add(MAX_U64, ONE_U512)); - assert_eq!(ONE_U64, add(ZERO_U64, U512::from(MAX_U64) + 2)); - assert_eq!(MAX_U64, add(ZERO_U64, MAX_U512)); - - // Adding to U128 - assert_eq!(ONE_U128, add(ZERO_U128, ONE_I32)); - assert_eq!(ZERO_U128, add(MAX_U128, ONE_I32)); - assert_eq!(ZERO_U128, add(ONE_U128, NEG_ONE_I32)); - assert_eq!(MAX_U128, add(ZERO_U128, NEG_ONE_I32)); - - assert_eq!(ONE_U128, add(ZERO_U128, ONE_U64)); - assert_eq!(ZERO_U128, add(MAX_U128, ONE_U64)); - - assert_eq!(ONE_U128, add(ZERO_U128, ONE_U128)); - assert_eq!(ZERO_U128, add(MAX_U128, ONE_U128)); - assert_eq!(MAX_U128 - 1, add(MAX_U128, MAX_U128)); - - assert_eq!(ONE_U128, add(ZERO_U128, ONE_U256)); - assert_eq!(ZERO_U128, add(MAX_U128, ONE_U256)); - assert_eq!( - ONE_U128, - add( - ZERO_U128, - U256::from_dec_str(&MAX_U128.to_string()).unwrap() + 2, - ) - ); - assert_eq!(MAX_U128, add(ZERO_U128, MAX_U256)); - - assert_eq!(ONE_U128, add(ZERO_U128, ONE_U512)); - assert_eq!(ZERO_U128, add(MAX_U128, ONE_U512)); - assert_eq!( - ONE_U128, - add( - ZERO_U128, - U512::from_dec_str(&MAX_U128.to_string()).unwrap() + 2, - ) - ); - assert_eq!(MAX_U128, add(ZERO_U128, MAX_U512)); - - // Adding to U256 - assert_eq!(ONE_U256, add(ZERO_U256, ONE_I32)); - assert_eq!(ZERO_U256, add(MAX_U256, ONE_I32)); - assert_eq!(ZERO_U256, add(ONE_U256, NEG_ONE_I32)); - assert_eq!(MAX_U256, add(ZERO_U256, NEG_ONE_I32)); - - assert_eq!(ONE_U256, add(ZERO_U256, ONE_U64)); - assert_eq!(ZERO_U256, add(MAX_U256, ONE_U64)); - - assert_eq!(ONE_U256, add(ZERO_U256, ONE_U128)); - assert_eq!(ZERO_U256, add(MAX_U256, ONE_U128)); - - assert_eq!(ONE_U256, add(ZERO_U256, ONE_U256)); - assert_eq!(ZERO_U256, add(MAX_U256, ONE_U256)); - assert_eq!(MAX_U256 - 1, add(MAX_U256, MAX_U256)); - - assert_eq!(ONE_U256, 
add(ZERO_U256, ONE_U512)); - assert_eq!(ZERO_U256, add(MAX_U256, ONE_U512)); - assert_eq!( - ONE_U256, - add( - ZERO_U256, - U512::from_dec_str(&MAX_U256.to_string()).unwrap() + 2, - ) - ); - assert_eq!(MAX_U256, add(ZERO_U256, MAX_U512)); - - // Adding to U512 - assert_eq!(ONE_U512, add(ZERO_U512, ONE_I32)); - assert_eq!(ZERO_U512, add(MAX_U512, ONE_I32)); - assert_eq!(ZERO_U512, add(ONE_U512, NEG_ONE_I32)); - assert_eq!(MAX_U512, add(ZERO_U512, NEG_ONE_I32)); - - assert_eq!(ONE_U512, add(ZERO_U512, ONE_U64)); - assert_eq!(ZERO_U512, add(MAX_U512, ONE_U64)); - - assert_eq!(ONE_U512, add(ZERO_U512, ONE_U128)); - assert_eq!(ZERO_U512, add(MAX_U512, ONE_U128)); - - assert_eq!(ONE_U512, add(ZERO_U512, ONE_U256)); - assert_eq!(ZERO_U512, add(MAX_U512, ONE_U256)); - - assert_eq!(ONE_U512, add(ZERO_U512, ONE_U512)); - assert_eq!(ZERO_U512, add(MAX_U512, ONE_U512)); - assert_eq!(MAX_U512 - 1, add(MAX_U512, MAX_U512)); - } - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..11 { - let execution_result = TransformKind::random(rng); - bytesrepr::test_serialization_roundtrip(&execution_result); - } - } -} diff --git a/casper_types_ver_2_0/src/file_utils.rs b/casper_types_ver_2_0/src/file_utils.rs deleted file mode 100644 index 775a7315..00000000 --- a/casper_types_ver_2_0/src/file_utils.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Utilities for handling reading from and writing to files. - -use std::{ - fs, - io::{self, Write}, - os::unix::fs::OpenOptionsExt, - path::{Path, PathBuf}, -}; - -use thiserror::Error; - -/// Error reading a file. -#[derive(Debug, Error)] -#[error("could not read '{0}': {error}", .path.display())] -pub struct ReadFileError { - /// Path that failed to be read. - path: PathBuf, - /// The underlying OS error. - #[source] - error: io::Error, -} - -/// Error writing a file -#[derive(Debug, Error)] -#[error("could not write to '{0}': {error}", .path.display())] -pub struct WriteFileError { - /// Path that failed to be written to. - path: PathBuf, - /// The underlying OS error. - #[source] - error: io::Error, -} - -/// Read complete at `path` into memory. -/// -/// Wraps `fs::read`, but preserves the filename for better error printing. -pub fn read_file>(filename: P) -> Result, ReadFileError> { - let path = filename.as_ref(); - fs::read(path).map_err(|error| ReadFileError { - path: path.to_owned(), - error, - }) -} - -/// Write data to `path`. -/// -/// Wraps `fs::write`, but preserves the filename for better error printing. -pub(crate) fn write_file, B: AsRef<[u8]>>( - filename: P, - data: B, -) -> Result<(), WriteFileError> { - let path = filename.as_ref(); - fs::write(path, data.as_ref()).map_err(|error| WriteFileError { - path: path.to_owned(), - error, - }) -} - -/// Writes data to `path`, ensuring only the owner can read or write it. -/// -/// Otherwise functions like [`write_file`]. -pub(crate) fn write_private_file, B: AsRef<[u8]>>( - filename: P, - data: B, -) -> Result<(), WriteFileError> { - let path = filename.as_ref(); - fs::OpenOptions::new() - .write(true) - .create(true) - .mode(0o600) - .open(path) - .and_then(|mut file| file.write_all(data.as_ref())) - .map_err(|error| WriteFileError { - path: path.to_owned(), - error, - }) -} diff --git a/casper_types_ver_2_0/src/gas.rs b/casper_types_ver_2_0/src/gas.rs deleted file mode 100644 index 7689849e..00000000 --- a/casper_types_ver_2_0/src/gas.rs +++ /dev/null @@ -1,240 +0,0 @@ -//! The `gas` module is used for working with Gas including converting to and from Motes. 
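One detail of the removed `file_utils` module worth keeping in mind: `write_private_file` opens the file with `OpenOptions::mode(0o600)` (a Unix-only extension), so a freshly created secret-key file is readable and writable by its owner only; the mode has no effect if the file already exists. A minimal standalone sketch of the same idea, with a hypothetical path:

    use std::{
        fs::OpenOptions,
        io::{self, Write},
        os::unix::fs::OpenOptionsExt, // Unix-only: provides `mode()`
        path::Path,
    };

    // Write `data` to `path`, creating the file with permissions 0o600
    // (owner read/write only) when it does not yet exist.
    fn write_owner_only(path: &Path, data: &[u8]) -> io::Result<()> {
        let mut file = OpenOptions::new()
            .write(true)
            .create(true)
            .mode(0o600)
            .open(path)?;
        file.write_all(data)
    }

    fn main() -> io::Result<()> {
        // Hypothetical path, for illustration only.
        write_owner_only(Path::new("/tmp/example-secret-key.pem"), b"not a real key")
    }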
- -use core::{ - fmt, - iter::Sum, - ops::{Add, AddAssign, Div, Mul, Sub}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; -use serde::{Deserialize, Serialize}; - -use crate::{Motes, U512}; - -/// The `Gas` struct represents a `U512` amount of gas. -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Gas(U512); - -impl Gas { - /// Constructs a new `Gas`. - pub fn new(value: U512) -> Self { - Gas(value) - } - - /// Returns the inner `U512` value. - pub fn value(&self) -> U512 { - self.0 - } - - /// Returns the cost to be charged. - pub fn cost(&self, is_system: bool) -> Self { - if is_system { - return Gas::new(U512::zero()); - } - *self - } - - /// Converts the given `motes` to `Gas` by dividing them by `conv_rate`. - /// - /// Returns `None` if `conv_rate == 0`. - pub fn from_motes(motes: Motes, conv_rate: u64) -> Option { - motes - .value() - .checked_div(U512::from(conv_rate)) - .map(Self::new) - } - - /// Checked integer addition. Computes `self + rhs`, returning `None` if overflow occurred. - pub fn checked_add(&self, rhs: Self) -> Option { - self.0.checked_add(rhs.value()).map(Self::new) - } - - /// Checked integer subtraction. Computes `self - rhs`, returning `None` if overflow occurred. - pub fn checked_sub(&self, rhs: Self) -> Option { - self.0.checked_sub(rhs.value()).map(Self::new) - } -} - -impl fmt::Display for Gas { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.0) - } -} - -impl Add for Gas { - type Output = Gas; - - fn add(self, rhs: Self) -> Self::Output { - let val = self.value() + rhs.value(); - Gas::new(val) - } -} - -impl Sub for Gas { - type Output = Gas; - - fn sub(self, rhs: Self) -> Self::Output { - let val = self.value() - rhs.value(); - Gas::new(val) - } -} - -impl Div for Gas { - type Output = Gas; - - fn div(self, rhs: Self) -> Self::Output { - let val = self.value() / rhs.value(); - Gas::new(val) - } -} - -impl Mul for Gas { - type Output = Gas; - - fn mul(self, rhs: Self) -> Self::Output { - let val = self.value() * rhs.value(); - Gas::new(val) - } -} - -impl AddAssign for Gas { - fn add_assign(&mut self, rhs: Self) { - self.0 += rhs.0 - } -} - -impl Zero for Gas { - fn zero() -> Self { - Gas::new(U512::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl Sum for Gas { - fn sum>(iter: I) -> Self { - iter.fold(Gas::zero(), Add::add) - } -} - -impl From for Gas { - fn from(gas: u32) -> Self { - let gas_u512: U512 = gas.into(); - Gas::new(gas_u512) - } -} - -impl From for Gas { - fn from(gas: u64) -> Self { - let gas_u512: U512 = gas.into(); - Gas::new(gas_u512) - } -} - -#[cfg(test)] -mod tests { - use crate::U512; - - use crate::{Gas, Motes}; - - #[test] - fn should_be_able_to_get_instance_of_gas() { - let initial_value = 1; - let gas = Gas::new(U512::from(initial_value)); - assert_eq!( - initial_value, - gas.value().as_u64(), - "should have equal value" - ) - } - - #[test] - fn should_be_able_to_compare_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - assert_eq!(left_gas, right_gas, "should be equal"); - let right_gas = Gas::new(U512::from(2)); - assert_ne!(left_gas, right_gas, "should not be equal") - } - - #[test] - fn should_be_able_to_add_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - let expected_gas = Gas::new(U512::from(2)); - 
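The subtle point in the removed `Gas` type is `from_motes`: the motes amount is divided by the conversion rate with `checked_div`, so a zero rate yields `None` instead of panicking (the `should_support_checked_div_from_motes` test below exercises exactly that). A standalone sketch with plain `u64` in place of `U512`:

    #[derive(Debug, PartialEq, Clone, Copy)]
    struct Motes(u64);

    #[derive(Debug, PartialEq, Clone, Copy)]
    struct Gas(u64);

    impl Gas {
        // Divide motes by the conversion rate; a zero rate is reported as
        // `None` rather than causing a divide-by-zero panic.
        fn from_motes(motes: Motes, conv_rate: u64) -> Option<Gas> {
            motes.0.checked_div(conv_rate).map(Gas)
        }
    }

    fn main() {
        assert_eq!(Gas::from_motes(Motes(100), 10), Some(Gas(10)));
        assert_eq!(Gas::from_motes(Motes(100), 0), None);
    }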
assert_eq!((left_gas + right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_subtract_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1)); - let right_gas = Gas::new(U512::from(1)); - let expected_gas = Gas::new(U512::from(0)); - assert_eq!((left_gas - right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_multiply_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(100)); - let right_gas = Gas::new(U512::from(10)); - let expected_gas = Gas::new(U512::from(1000)); - assert_eq!((left_gas * right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_divide_two_instances_of_gas() { - let left_gas = Gas::new(U512::from(1000)); - let right_gas = Gas::new(U512::from(100)); - let expected_gas = Gas::new(U512::from(10)); - assert_eq!((left_gas / right_gas), expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_convert_from_mote() { - let mote = Motes::new(U512::from(100)); - let gas = Gas::from_motes(mote, 10).expect("should have gas"); - let expected_gas = Gas::new(U512::from(10)); - assert_eq!(gas, expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_default() { - let gas = Gas::default(); - let expected_gas = Gas::new(U512::from(0)); - assert_eq!(gas, expected_gas, "should be equal") - } - - #[test] - fn should_be_able_to_compare_relative_value() { - let left_gas = Gas::new(U512::from(100)); - let right_gas = Gas::new(U512::from(10)); - assert!(left_gas > right_gas, "should be gt"); - let right_gas = Gas::new(U512::from(100)); - assert!(left_gas >= right_gas, "should be gte"); - assert!(left_gas <= right_gas, "should be lte"); - let left_gas = Gas::new(U512::from(10)); - assert!(left_gas < right_gas, "should be lt"); - } - - #[test] - fn should_default() { - let left_gas = Gas::new(U512::from(0)); - let right_gas = Gas::default(); - assert_eq!(left_gas, right_gas, "should be equal"); - let u512 = U512::zero(); - assert_eq!(left_gas.value(), u512, "should be equal"); - } - - #[test] - fn should_support_checked_div_from_motes() { - let motes = Motes::new(U512::zero()); - let conv_rate = 0; - let maybe = Gas::from_motes(motes, conv_rate); - assert!(maybe.is_none(), "should be none due to divide by zero"); - } -} diff --git a/casper_types_ver_2_0/src/gens.rs b/casper_types_ver_2_0/src/gens.rs deleted file mode 100644 index ac09ad12..00000000 --- a/casper_types_ver_2_0/src/gens.rs +++ /dev/null @@ -1,738 +0,0 @@ -//! Contains functions for generating arbitrary values for use by -//! [`Proptest`](https://crates.io/crates/proptest). 
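The removed `gens` module is a library of `proptest` strategies: primitive generators are composed with combinators like `prop_map`, `collection::vec`, and `prop_oneof!` into arbitrary domain values. A minimal self-contained example in the same style (it assumes `proptest` as a dev-dependency; `AccountId` and the property are hypothetical stand-ins):

    use proptest::prelude::*;

    #[derive(Debug, Clone, PartialEq)]
    struct AccountId([u8; 32]);

    // Compose a primitive strategy (32 arbitrary bytes) into a domain value.
    fn account_id_arb() -> impl Strategy<Value = AccountId> {
        proptest::array::uniform32(any::<u8>()).prop_map(AccountId)
    }

    proptest! {
        #[test]
        fn generated_ids_hex_encode_to_64_chars(id in account_id_arb()) {
            let hex: String = id.0.iter().map(|b| format!("{:02x}", b)).collect();
            prop_assert_eq!(hex.len(), 64);
        }
    }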
-#![allow(missing_docs)] - -use alloc::{ - boxed::Box, - collections::{BTreeMap, BTreeSet}, - string::String, - vec, -}; - -use proptest::{ - array, bits, bool, - collection::{self, SizeRange}, - option, - prelude::*, - result, -}; - -use crate::{ - account::{self, action_thresholds::gens::account_action_thresholds_arb, AccountHash}, - addressable_entity::{MessageTopics, NamedKeys, Parameters, Weight}, - contract_messages::{MessageChecksum, MessageTopicSummary, TopicNameHash}, - crypto::{self, gens::public_key_arb_no_system}, - package::{EntityVersionKey, EntityVersions, Groups, PackageStatus}, - system::auction::{ - gens::era_info_arb, DelegationRate, Delegator, UnbondingPurse, WithdrawPurse, - DELEGATION_RATE_DENOMINATOR, - }, - transfer::TransferAddr, - AccessRights, AddressableEntity, AddressableEntityHash, BlockTime, ByteCode, CLType, CLValue, - EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, EraId, Group, Key, NamedArg, - Package, Parameter, Phase, ProtocolVersion, SemVer, StoredValue, URef, U128, U256, U512, -}; - -use crate::{ - account::{associated_keys::gens::account_associated_keys_arb, Account}, - addressable_entity::{ - action_thresholds::gens::action_thresholds_arb, associated_keys::gens::associated_keys_arb, - }, - byte_code::ByteCodeKind, - contracts::{ - Contract, ContractHash, ContractPackage, ContractPackageStatus, ContractVersionKey, - ContractVersions, - }, - deploy_info::gens::{deploy_hash_arb, transfer_addr_arb}, - package::PackageKind, - system::auction::{Bid, BidAddr, BidKind, ValidatorBid}, -}; -pub use crate::{deploy_info::gens::deploy_info_arb, transfer::gens::transfer_arb}; - -pub fn u8_slice_32() -> impl Strategy { - collection::vec(any::(), 32).prop_map(|b| { - let mut res = [0u8; 32]; - res.clone_from_slice(b.as_slice()); - res - }) -} - -pub fn u2_slice_32() -> impl Strategy { - array::uniform32(any::()).prop_map(|mut arr| { - for byte in arr.iter_mut() { - *byte &= 0b11; - } - arr - }) -} - -pub(crate) fn named_keys_arb(depth: usize) -> impl Strategy { - collection::btree_map("\\PC*", key_arb(), depth).prop_map(NamedKeys::from) -} - -pub fn access_rights_arb() -> impl Strategy { - prop_oneof![ - Just(AccessRights::NONE), - Just(AccessRights::READ), - Just(AccessRights::ADD), - Just(AccessRights::WRITE), - Just(AccessRights::READ_ADD), - Just(AccessRights::READ_WRITE), - Just(AccessRights::ADD_WRITE), - Just(AccessRights::READ_ADD_WRITE), - ] -} - -pub fn phase_arb() -> impl Strategy { - prop_oneof![ - Just(Phase::Payment), - Just(Phase::Session), - Just(Phase::FinalizePayment), - ] -} - -pub fn uref_arb() -> impl Strategy { - (array::uniform32(bits::u8::ANY), access_rights_arb()) - .prop_map(|(id, access_rights)| URef::new(id, access_rights)) -} - -pub fn era_id_arb() -> impl Strategy { - any::().prop_map(EraId::from) -} - -pub fn key_arb() -> impl Strategy { - prop_oneof![ - account_hash_arb().prop_map(Key::Account), - u8_slice_32().prop_map(Key::Hash), - uref_arb().prop_map(Key::URef), - transfer_addr_arb().prop_map(Key::Transfer), - deploy_hash_arb().prop_map(Key::DeployInfo), - era_id_arb().prop_map(Key::EraInfo), - uref_arb().prop_map(|uref| Key::Balance(uref.addr())), - bid_addr_validator_arb().prop_map(Key::BidAddr), - bid_addr_delegator_arb().prop_map(Key::BidAddr), - account_hash_arb().prop_map(Key::Withdraw), - u8_slice_32().prop_map(Key::Dictionary), - Just(Key::EraSummary), - ] -} - -pub fn colliding_key_arb() -> impl Strategy { - prop_oneof![ - u2_slice_32().prop_map(|bytes| Key::Account(AccountHash::new(bytes))), - 
u2_slice_32().prop_map(Key::Hash), - u2_slice_32().prop_map(|bytes| Key::URef(URef::new(bytes, AccessRights::NONE))), - u2_slice_32().prop_map(|bytes| Key::Transfer(TransferAddr::new(bytes))), - u2_slice_32().prop_map(Key::Dictionary), - ] -} - -pub fn account_hash_arb() -> impl Strategy { - u8_slice_32().prop_map(AccountHash::new) -} - -pub fn bid_addr_validator_arb() -> impl Strategy { - u8_slice_32().prop_map(BidAddr::new_validator_addr) -} - -pub fn bid_addr_delegator_arb() -> impl Strategy { - let x = u8_slice_32(); - let y = u8_slice_32(); - (x, y).prop_map(BidAddr::new_delegator_addr) -} - -pub fn weight_arb() -> impl Strategy { - any::().prop_map(Weight::new) -} - -pub fn account_weight_arb() -> impl Strategy { - any::().prop_map(account::Weight::new) -} - -pub fn sem_ver_arb() -> impl Strategy { - (any::(), any::(), any::()) - .prop_map(|(major, minor, patch)| SemVer::new(major, minor, patch)) -} - -pub fn protocol_version_arb() -> impl Strategy { - sem_ver_arb().prop_map(ProtocolVersion::new) -} - -pub fn u128_arb() -> impl Strategy { - collection::vec(any::(), 0..16).prop_map(|b| U128::from_little_endian(b.as_slice())) -} - -pub fn u256_arb() -> impl Strategy { - collection::vec(any::(), 0..32).prop_map(|b| U256::from_little_endian(b.as_slice())) -} - -pub fn u512_arb() -> impl Strategy { - prop_oneof![ - 1 => Just(U512::zero()), - 8 => collection::vec(any::(), 0..64).prop_map(|b| U512::from_little_endian(b.as_slice())), - 1 => Just(U512::MAX), - ] -} - -pub fn cl_simple_type_arb() -> impl Strategy { - prop_oneof![ - Just(CLType::Bool), - Just(CLType::I32), - Just(CLType::I64), - Just(CLType::U8), - Just(CLType::U32), - Just(CLType::U64), - Just(CLType::U128), - Just(CLType::U256), - Just(CLType::U512), - Just(CLType::Unit), - Just(CLType::String), - Just(CLType::Key), - Just(CLType::URef), - ] -} - -pub fn cl_type_arb() -> impl Strategy { - cl_simple_type_arb().prop_recursive(4, 16, 8, |element| { - prop_oneof![ - // We want to produce basic types too - element.clone(), - // For complex type - element - .clone() - .prop_map(|val| CLType::Option(Box::new(val))), - element.clone().prop_map(|val| CLType::List(Box::new(val))), - // Realistic Result type generator: ok is anything recursive, err is simple type - (element.clone(), cl_simple_type_arb()).prop_map(|(ok, err)| CLType::Result { - ok: Box::new(ok), - err: Box::new(err) - }), - // Realistic Map type generator: key is simple type, value is complex recursive type - (cl_simple_type_arb(), element.clone()).prop_map(|(key, value)| CLType::Map { - key: Box::new(key), - value: Box::new(value) - }), - // Various tuples - element - .clone() - .prop_map(|cl_type| CLType::Tuple1([Box::new(cl_type)])), - (element.clone(), element.clone()).prop_map(|(cl_type1, cl_type2)| CLType::Tuple2([ - Box::new(cl_type1), - Box::new(cl_type2) - ])), - (element.clone(), element.clone(), element).prop_map( - |(cl_type1, cl_type2, cl_type3)| CLType::Tuple3([ - Box::new(cl_type1), - Box::new(cl_type2), - Box::new(cl_type3) - ]) - ), - ] - }) -} - -pub fn cl_value_arb() -> impl Strategy { - // If compiler brings you here it most probably means you've added a variant to `CLType` enum - // but forgot to add generator for it. 
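The comment just above describes a deliberate compile-time tripwire: `cl_value_arb` matches exhaustively over a stub value that never exists at runtime, so adding a `CLType` variant breaks the build until a matching generator is added. A standalone sketch of the trick with a hypothetical enum:

    #[allow(dead_code)]
    enum Shape {
        Circle,
        Square,
        // Adding `Triangle` here makes the match below non-exhaustive, forcing
        // the generator list to be updated before the code compiles again.
    }

    fn shape_generators() -> Vec<&'static str> {
        // Never taken at runtime; present only so the compiler checks coverage.
        let stub: Option<Shape> = None;
        if let Some(shape) = stub {
            match shape {
                Shape::Circle | Shape::Square => (),
            }
        }

        // One "generator" per variant.
        vec!["circle", "square"]
    }

    fn main() {
        assert_eq!(shape_generators().len(), 2);
    }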
- let stub: Option = None; - if let Some(cl_type) = stub { - match cl_type { - CLType::Bool - | CLType::I32 - | CLType::I64 - | CLType::U8 - | CLType::U32 - | CLType::U64 - | CLType::U128 - | CLType::U256 - | CLType::U512 - | CLType::Unit - | CLType::String - | CLType::Key - | CLType::URef - | CLType::PublicKey - | CLType::Option(_) - | CLType::List(_) - | CLType::ByteArray(..) - | CLType::Result { .. } - | CLType::Map { .. } - | CLType::Tuple1(_) - | CLType::Tuple2(_) - | CLType::Tuple3(_) - | CLType::Any => (), - } - }; - - prop_oneof![ - Just(CLValue::from_t(()).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - any::().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u128_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u256_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - u512_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - key_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - uref_arb().prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - ".*".prop_map(|x: String| CLValue::from_t(x).expect("should create CLValue")), - option::of(any::()).prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - collection::vec(uref_arb(), 0..100) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - result::maybe_err(key_arb(), ".*") - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - collection::btree_map(".*", u512_arb(), 0..100) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::()).prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::(), any::()) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - (any::(), any::(), any::()) - .prop_map(|x| CLValue::from_t(x).expect("should create CLValue")), - // Fixed lists of any size - any::().prop_map(|len| CLValue::from_t([len; 32]).expect("should create CLValue")), - ] -} - -pub fn result_arb() -> impl Strategy> { - result::maybe_ok(any::(), any::()) -} - -pub fn named_args_arb() -> impl Strategy { - (".*", cl_value_arb()).prop_map(|(name, value)| NamedArg::new(name, value)) -} - -pub fn group_arb() -> impl Strategy { - ".*".prop_map(Group::new) -} - -pub fn entry_point_access_arb() -> impl Strategy { - prop_oneof![ - Just(EntryPointAccess::Public), - collection::vec(group_arb(), 0..32).prop_map(EntryPointAccess::Groups), - Just(EntryPointAccess::Template), - ] -} - -pub fn entry_point_type_arb() -> impl Strategy { - prop_oneof![ - Just(EntryPointType::Session), - Just(EntryPointType::AddressableEntity), - Just(EntryPointType::Factory), - ] -} - -pub fn parameter_arb() -> impl Strategy { - (".*", cl_type_arb()).prop_map(|(name, cl_type)| Parameter::new(name, cl_type)) -} - -pub fn parameters_arb() -> impl Strategy { - collection::vec(parameter_arb(), 0..10) -} - -pub fn entry_point_arb() -> impl Strategy { - ( - ".*", - parameters_arb(), - entry_point_type_arb(), - entry_point_access_arb(), - cl_type_arb(), - ) - .prop_map( - |(name, parameters, entry_point_type, entry_point_access, ret)| { - EntryPoint::new(name, parameters, ret, entry_point_access, 
entry_point_type) - }, - ) -} - -pub fn entry_points_arb() -> impl Strategy { - collection::vec(entry_point_arb(), 1..10).prop_map(EntryPoints::from) -} - -pub fn message_topics_arb() -> impl Strategy { - collection::vec(any::(), 1..100).prop_map(|topic_names| { - MessageTopics::from( - topic_names - .into_iter() - .map(|name| { - let name_hash = crypto::blake2b(&name).into(); - (name, name_hash) - }) - .collect::>(), - ) - }) -} - -pub fn account_arb() -> impl Strategy { - ( - account_hash_arb(), - named_keys_arb(20), - uref_arb(), - account_associated_keys_arb(), - account_action_thresholds_arb(), - ) - .prop_map( - |(account_hash, named_keys, main_purse, associated_keys, action_thresholds)| { - Account::new( - account_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - ) - }, - ) -} - -pub fn contract_package_arb() -> impl Strategy { - ( - uref_arb(), - contract_versions_arb(), - disabled_contract_versions_arb(), - groups_arb(), - ) - .prop_map(|(access_key, versions, disabled_versions, groups)| { - ContractPackage::new( - access_key, - versions, - disabled_versions, - groups, - ContractPackageStatus::default(), - ) - }) -} - -pub fn contract_arb() -> impl Strategy { - ( - protocol_version_arb(), - entry_points_arb(), - u8_slice_32(), - u8_slice_32(), - named_keys_arb(20), - ) - .prop_map( - |( - protocol_version, - entry_points, - contract_package_hash_arb, - contract_wasm_hash, - named_keys, - )| { - Contract::new( - contract_package_hash_arb.into(), - contract_wasm_hash.into(), - named_keys, - entry_points, - protocol_version, - ) - }, - ) -} - -pub fn addressable_entity_arb() -> impl Strategy { - ( - protocol_version_arb(), - entry_points_arb(), - u8_slice_32(), - u8_slice_32(), - named_keys_arb(20), - uref_arb(), - associated_keys_arb(), - action_thresholds_arb(), - message_topics_arb(), - ) - .prop_map( - |( - protocol_version, - entry_points, - contract_package_hash_arb, - contract_wasm_hash, - named_keys, - main_purse, - associated_keys, - action_thresholds, - message_topics, - )| { - AddressableEntity::new( - contract_package_hash_arb.into(), - contract_wasm_hash.into(), - named_keys, - entry_points, - protocol_version, - main_purse, - associated_keys, - action_thresholds, - message_topics, - ) - }, - ) -} - -pub fn byte_code_arb() -> impl Strategy { - collection::vec(any::(), 1..1000) - .prop_map(|byte_code| ByteCode::new(ByteCodeKind::V1CasperWasm, byte_code)) -} - -pub fn contract_version_key_arb() -> impl Strategy { - (1..32u32, 1..1000u32) - .prop_map(|(major, contract_ver)| ContractVersionKey::new(major, contract_ver)) -} - -pub fn entity_version_key_arb() -> impl Strategy { - (1..32u32, 1..1000u32) - .prop_map(|(major, contract_ver)| EntityVersionKey::new(major, contract_ver)) -} - -pub fn contract_versions_arb() -> impl Strategy { - collection::btree_map( - contract_version_key_arb(), - u8_slice_32().prop_map(ContractHash::new), - 1..5, - ) -} - -pub fn entity_versions_arb() -> impl Strategy { - collection::btree_map( - entity_version_key_arb(), - u8_slice_32().prop_map(AddressableEntityHash::new), - 1..5, - ) - .prop_map(EntityVersions::from) -} - -pub fn disabled_versions_arb() -> impl Strategy> { - collection::btree_set(entity_version_key_arb(), 0..5) -} - -pub fn disabled_contract_versions_arb() -> impl Strategy> { - collection::btree_set(contract_version_key_arb(), 0..5) -} - -pub fn groups_arb() -> impl Strategy { - collection::btree_map(group_arb(), collection::btree_set(uref_arb(), 1..10), 0..5) - .prop_map(Groups::from) -} - -pub fn 
package_arb() -> impl Strategy { - ( - uref_arb(), - entity_versions_arb(), - disabled_versions_arb(), - groups_arb(), - ) - .prop_map(|(access_key, versions, disabled_versions, groups)| { - Package::new( - access_key, - versions, - disabled_versions, - groups, - PackageStatus::default(), - PackageKind::SmartContract, - ) - }) -} - -pub(crate) fn delegator_arb() -> impl Strategy { - ( - public_key_arb_no_system(), - u512_arb(), - uref_arb(), - public_key_arb_no_system(), - ) - .prop_map( - |(delegator_pk, staked_amount, bonding_purse, validator_pk)| { - Delegator::unlocked(delegator_pk, staked_amount, bonding_purse, validator_pk) - }, - ) -} - -fn delegation_rate_arb() -> impl Strategy { - 0..=DELEGATION_RATE_DENOMINATOR // Maximum, allowed value for delegation rate. -} - -pub(crate) fn unified_bid_arb( - delegations_len: impl Into, -) -> impl Strategy { - ( - public_key_arb_no_system(), - uref_arb(), - u512_arb(), - delegation_rate_arb(), - bool::ANY, - collection::vec(delegator_arb(), delegations_len), - ) - .prop_map( - |( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - is_locked, - new_delegators, - )| { - let mut bid = if is_locked { - Bid::locked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - 1u64, - ) - } else { - Bid::unlocked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - ) - }; - let delegators = bid.delegators_mut(); - new_delegators.into_iter().for_each(|delegator| { - assert!(delegators - .insert(delegator.delegator_public_key().clone(), delegator) - .is_none()); - }); - BidKind::Unified(Box::new(bid)) - }, - ) -} - -pub(crate) fn delegator_bid_arb() -> impl Strategy { - (delegator_arb()).prop_map(|delegator| BidKind::Delegator(Box::new(delegator))) -} - -pub(crate) fn validator_bid_arb() -> impl Strategy { - ( - public_key_arb_no_system(), - uref_arb(), - u512_arb(), - delegation_rate_arb(), - bool::ANY, - ) - .prop_map( - |(validator_public_key, bonding_purse, staked_amount, delegation_rate, is_locked)| { - let validator_bid = if is_locked { - ValidatorBid::locked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - 1u64, - ) - } else { - ValidatorBid::unlocked( - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - ) - }; - BidKind::Validator(Box::new(validator_bid)) - }, - ) -} - -fn withdraw_arb() -> impl Strategy { - ( - uref_arb(), - public_key_arb_no_system(), - public_key_arb_no_system(), - era_id_arb(), - u512_arb(), - ) - .prop_map(|(bonding_purse, validator_pk, unbonder_pk, era, amount)| { - WithdrawPurse::new(bonding_purse, validator_pk, unbonder_pk, era, amount) - }) -} - -fn withdraws_arb(size: impl Into) -> impl Strategy> { - collection::vec(withdraw_arb(), size) -} - -fn unbonding_arb() -> impl Strategy { - ( - uref_arb(), - public_key_arb_no_system(), - public_key_arb_no_system(), - era_id_arb(), - u512_arb(), - option::of(public_key_arb_no_system()), - ) - .prop_map( - |( - bonding_purse, - validator_public_key, - unbonder_public_key, - era, - amount, - new_validator, - )| { - UnbondingPurse::new( - bonding_purse, - validator_public_key, - unbonder_public_key, - era, - amount, - new_validator, - ) - }, - ) -} - -fn unbondings_arb(size: impl Into) -> impl Strategy> { - collection::vec(unbonding_arb(), size) -} - -fn message_topic_summary_arb() -> impl Strategy { - (any::(), any::()).prop_map(|(message_count, blocktime)| MessageTopicSummary { - message_count, - blocktime: BlockTime::new(blocktime), - }) -} - -fn 
message_summary_arb() -> impl Strategy { - u8_slice_32().prop_map(MessageChecksum) -} - -pub fn stored_value_arb() -> impl Strategy { - prop_oneof![ - cl_value_arb().prop_map(StoredValue::CLValue), - account_arb().prop_map(StoredValue::Account), - byte_code_arb().prop_map(StoredValue::ByteCode), - contract_arb().prop_map(StoredValue::Contract), - addressable_entity_arb().prop_map(StoredValue::AddressableEntity), - package_arb().prop_map(StoredValue::Package), - transfer_arb().prop_map(StoredValue::Transfer), - deploy_info_arb().prop_map(StoredValue::DeployInfo), - era_info_arb(1..10).prop_map(StoredValue::EraInfo), - unified_bid_arb(0..3).prop_map(StoredValue::BidKind), - validator_bid_arb().prop_map(StoredValue::BidKind), - delegator_bid_arb().prop_map(StoredValue::BidKind), - withdraws_arb(1..50).prop_map(StoredValue::Withdraw), - unbondings_arb(1..50).prop_map(StoredValue::Unbonding), - message_topic_summary_arb().prop_map(StoredValue::MessageTopic), - message_summary_arb().prop_map(StoredValue::Message), - ] - .prop_map(|stored_value| - // The following match statement is here only to make sure - // we don't forget to update the generator when a new variant is added. - match stored_value { - StoredValue::CLValue(_) => stored_value, - StoredValue::Account(_) => stored_value, - StoredValue::ContractWasm(_) => stored_value, - StoredValue::Contract(_) => stored_value, - StoredValue::ContractPackage(_) => stored_value, - StoredValue::Transfer(_) => stored_value, - StoredValue::DeployInfo(_) => stored_value, - StoredValue::EraInfo(_) => stored_value, - StoredValue::Bid(_) => stored_value, - StoredValue::Withdraw(_) => stored_value, - StoredValue::Unbonding(_) => stored_value, - StoredValue::AddressableEntity(_) => stored_value, - StoredValue::BidKind(_) => stored_value, - StoredValue::Package(_) => stored_value, - StoredValue::ByteCode(_) => stored_value, - StoredValue::MessageTopic(_) => stored_value, - StoredValue::Message(_) => stored_value, - }) -} diff --git a/casper_types_ver_2_0/src/json_pretty_printer.rs b/casper_types_ver_2_0/src/json_pretty_printer.rs deleted file mode 100644 index 3648d38c..00000000 --- a/casper_types_ver_2_0/src/json_pretty_printer.rs +++ /dev/null @@ -1,291 +0,0 @@ -extern crate alloc; - -use alloc::{format, string::String, vec::Vec}; - -use serde::Serialize; -use serde_json::{json, Value}; - -const MAX_STRING_LEN: usize = 150; - -/// Represents the information about a substring found in a string. -#[derive(Debug)] -struct SubstringSpec { - /// Index of the first character. - start_index: usize, - /// Length of the substring. - length: usize, -} - -impl SubstringSpec { - /// Constructs a new StringSpec with the given start index and length. - fn new(start_index: usize, length: usize) -> Self { - Self { - start_index, - length, - } - } -} - -/// Serializes the given data structure as a pretty-printed `String` of JSON using -/// `serde_json::to_string_pretty()`, but after first reducing any large hex-string values. -/// -/// A large hex-string is one containing only hex characters and which is over `MAX_STRING_LEN`. -/// Such hex-strings will be replaced by an indication of the number of chars redacted, for example -/// `[130 hex chars]`. 
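The redaction rule documented above boils down to: scan the string for maximal runs of ASCII hex digits and replace any run longer than the limit (150 in the removed code) with a `[N hex chars]` marker. A standalone sketch of that scan, with a deliberately small threshold so the example stays short:

    const MAX_HEX_RUN: usize = 8; // illustrative; the removed code uses 150

    fn redact_long_hex(input: &str) -> String {
        let mut out = String::new();
        let mut run = String::new();

        // Emit a pending hex run, replacing it with a marker if it is too long.
        let flush = |run: &mut String, out: &mut String| {
            if run.len() > MAX_HEX_RUN {
                out.push_str(&format!("[{} hex chars]", run.len()));
            } else {
                out.push_str(run);
            }
            run.clear();
        };

        for ch in input.chars() {
            if ch.is_ascii_hexdigit() {
                run.push(ch);
            } else {
                flush(&mut run, &mut out);
                out.push(ch);
            }
        }
        flush(&mut run, &mut out);
        out
    }

    fn main() {
        let shortened = redact_long_hex("hash: 0123456789abcdef0123, era: 42");
        assert_eq!(shortened, "hash: [20 hex chars], era: 42");
    }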
-pub fn json_pretty_print(value: &T) -> serde_json::Result -where - T: ?Sized + Serialize, -{ - let mut json_value = json!(value); - shorten_string_field(&mut json_value); - - serde_json::to_string_pretty(&json_value) -} - -/// Searches the given string for all occurrences of hex substrings -/// that are longer than the specified `max_len`. -fn find_hex_strings_longer_than(string: &str, max_len: usize) -> Vec { - let mut ranges_to_remove = Vec::new(); - let mut start_index = 0; - let mut contiguous_hex_count = 0; - - // Record all large hex-strings' start positions and lengths. - for (index, char) in string.char_indices() { - if char.is_ascii_hexdigit() { - if contiguous_hex_count == 0 { - // This is the start of a new hex-string. - start_index = index; - } - contiguous_hex_count += 1; - } else if contiguous_hex_count != 0 { - // This is the end of a hex-string: if it's too long, record it. - if contiguous_hex_count > max_len { - ranges_to_remove.push(SubstringSpec::new(start_index, contiguous_hex_count)); - } - contiguous_hex_count = 0; - } - } - // If the string contains a large hex-string at the end, record it now. - if contiguous_hex_count > max_len { - ranges_to_remove.push(SubstringSpec::new(start_index, contiguous_hex_count)); - } - ranges_to_remove -} - -fn shorten_string_field(value: &mut Value) { - match value { - Value::String(string) => { - // Iterate over the ranges to remove from last to first so each - // replacement start index remains valid. - find_hex_strings_longer_than(string, MAX_STRING_LEN) - .into_iter() - .rev() - .for_each( - |SubstringSpec { - start_index, - length, - }| { - let range = start_index..(start_index + length); - string.replace_range(range, &format!("[{} hex chars]", length)); - }, - ) - } - Value::Array(values) => { - for value in values { - shorten_string_field(value); - } - } - Value::Object(map) => { - for map_value in map.values_mut() { - shorten_string_field(map_value); - } - } - Value::Null | Value::Bool(_) | Value::Number(_) => {} - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn hex_string(length: usize) -> String { - "0123456789abcdef".chars().cycle().take(length).collect() - } - - impl PartialEq<(usize, usize)> for SubstringSpec { - fn eq(&self, other: &(usize, usize)) -> bool { - self.start_index == other.0 && self.length == other.1 - } - } - - #[test] - fn finds_hex_strings_longer_than() { - const TESTING_LEN: usize = 3; - - let input = "01234"; - let expected = vec![(0, 5)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "01234-0123"; - let expected = vec![(0, 5), (6, 4)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "012-34-0123"; - let expected = vec![(7, 4)]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "012-34-01-23"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = "0"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - - let input = ""; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, TESTING_LEN); - assert_eq!(actual, expected); - } - - #[test] - fn respects_length() { - let input = "I like beef"; - let expected = vec![(7, 4)]; - let actual = find_hex_strings_longer_than(input, 
3); - assert_eq!(actual, expected); - - let input = "I like beef"; - let expected: Vec<(usize, usize)> = vec![]; - let actual = find_hex_strings_longer_than(input, 1000); - assert_eq!(actual, expected); - } - - #[test] - fn should_shorten_long_strings() { - let max_unshortened_hex_string = hex_string(MAX_STRING_LEN); - let long_hex_string = hex_string(MAX_STRING_LEN + 1); - let long_non_hex_string: String = "g".repeat(MAX_STRING_LEN + 1); - let long_hex_substring = format!("a-{}-b", hex_string(MAX_STRING_LEN + 1)); - let multiple_long_hex_substrings = - format!("a: {0}, b: {0}, c: {0}", hex_string(MAX_STRING_LEN + 1)); - - let mut long_strings: Vec = vec![]; - for i in 1..=5 { - long_strings.push("a".repeat(MAX_STRING_LEN + i)); - } - let value = json!({ - "field_1": Option::::None, - "field_2": true, - "field_3": 123, - "field_4": max_unshortened_hex_string, - "field_5": ["short string value", long_hex_string], - "field_6": { - "f1": Option::::None, - "f2": false, - "f3": -123, - "f4": long_non_hex_string, - "f5": ["short string value", long_hex_substring], - "f6": { - "final long string": multiple_long_hex_substrings - } - } - }); - - let expected = r#"{ - "field_1": null, - "field_2": true, - "field_3": 123, - "field_4": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef012345", - "field_5": [ - "short string value", - "[151 hex chars]" - ], - "field_6": { - "f1": null, - "f2": false, - "f3": -123, - "f4": "ggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg", - "f5": [ - "short string value", - "a-[151 hex chars]-b" - ], - "f6": { - "final long string": "a: [151 hex chars], b: [151 hex chars], c: [151 hex chars]" - } - } -}"#; - - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } - - #[test] - fn should_not_modify_short_strings() { - let max_string: String = "a".repeat(MAX_STRING_LEN); - let value = json!({ - "field_1": Option::::None, - "field_2": true, - "field_3": 123, - "field_4": max_string, - "field_5": [ - "short string value", - "another short string" - ], - "field_6": { - "f1": Option::::None, - "f2": false, - "f3": -123, - "f4": "short", - "f5": [ - "short string value", - "another short string" - ], - "f6": { - "final string": "the last short string" - } - } - }); - - let expected = serde_json::to_string_pretty(&value).unwrap(); - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } - - #[test] - /// Ref: https://github.com/casper-network/casper-node/issues/1456 - fn regression_1456() { - let long_string = r#"state query failed: ValueNotFound("Failed to find base key at path: Key::Account(72698d4dc715a28347b15920b09b4f0f1d633be5a33f4686d06992415b0825e2)")"#; - assert_eq!(long_string.len(), 148); - - let value = json!({ - "code": -32003, - "message": long_string, - }); - - let expected = r#"{ - "code": -32003, - "message": "state query failed: ValueNotFound(\"Failed to find base key at path: Key::Account(72698d4dc715a28347b15920b09b4f0f1d633be5a33f4686d06992415b0825e2)\")" -}"#; - - let output = json_pretty_print(&value).unwrap(); - assert_eq!( - output, expected, - "Actual:\n{}\nExpected:\n{}\n", - output, expected - ); - } -} diff --git a/casper_types_ver_2_0/src/key.rs 
b/casper_types_ver_2_0/src/key.rs deleted file mode 100644 index eebc0f85..00000000 --- a/casper_types_ver_2_0/src/key.rs +++ /dev/null @@ -1,2172 +0,0 @@ -//! Key types. - -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; - -use core::{ - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - str::FromStr, -}; - -#[cfg(test)] -use crate::testing::TestRng; - -#[cfg(doc)] -use crate::CLValue; -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account::{AccountHash, ACCOUNT_HASH_LENGTH}, - addressable_entity, - addressable_entity::AddressableEntityHash, - byte_code::ByteCodeKind, - bytesrepr::{ - self, Error, FromBytes, ToBytes, U32_SERIALIZED_LENGTH, U64_SERIALIZED_LENGTH, - U8_SERIALIZED_LENGTH, - }, - checksummed_hex, - contract_messages::{self, MessageAddr, TopicNameHash, TOPIC_NAME_HASH_LENGTH}, - contract_wasm::ContractWasmHash, - contracts::{ContractHash, ContractPackageHash}, - package::{PackageHash, PackageKindTag}, - system::auction::{BidAddr, BidAddrTag}, - uref::{self, URef, URefAddr, UREF_SERIALIZED_LENGTH}, - DeployHash, Digest, EraId, Tagged, TransferAddr, TransferFromStrError, TRANSFER_ADDR_LENGTH, - UREF_ADDR_LENGTH, -}; - -const HASH_PREFIX: &str = "hash-"; -const DEPLOY_INFO_PREFIX: &str = "deploy-"; -const ERA_INFO_PREFIX: &str = "era-"; -const BALANCE_PREFIX: &str = "balance-"; -const BID_PREFIX: &str = "bid-"; -const WITHDRAW_PREFIX: &str = "withdraw-"; -const DICTIONARY_PREFIX: &str = "dictionary-"; -const UNBOND_PREFIX: &str = "unbond-"; -const SYSTEM_CONTRACT_REGISTRY_PREFIX: &str = "system-contract-registry-"; -const ERA_SUMMARY_PREFIX: &str = "era-summary-"; -const CHAINSPEC_REGISTRY_PREFIX: &str = "chainspec-registry-"; -const CHECKSUM_REGISTRY_PREFIX: &str = "checksum-registry-"; -const BID_ADDR_PREFIX: &str = "bid-addr-"; -const PACKAGE_PREFIX: &str = "package-"; -const ENTITY_PREFIX: &str = "addressable-entity-"; -const ACCOUNT_ENTITY_PREFIX: &str = "account-"; -const CONTRACT_ENTITY_PREFIX: &str = "contract-"; -const SYSTEM_ENTITY_PREFIX: &str = "system-"; -const BYTE_CODE_PREFIX: &str = "byte-code-"; -const V1_WASM_PREFIX: &str = "v1-wasm-"; -const EMPTY_PREFIX: &str = "empty-"; - -/// The number of bytes in a Blake2b hash -pub const BLAKE2B_DIGEST_LENGTH: usize = 32; -/// The number of bytes in a [`Key::Hash`]. -pub const KEY_HASH_LENGTH: usize = 32; -/// The number of bytes in a [`Key::Transfer`]. -pub const KEY_TRANSFER_LENGTH: usize = TRANSFER_ADDR_LENGTH; -/// The number of bytes in a [`Key::DeployInfo`]. -pub const KEY_DEPLOY_INFO_LENGTH: usize = DeployHash::LENGTH; -/// The number of bytes in a [`Key::Dictionary`]. -pub const KEY_DICTIONARY_LENGTH: usize = 32; -/// The maximum length for a `dictionary_item_key`. -pub const DICTIONARY_ITEM_KEY_MAX_LENGTH: usize = 128; -/// The maximum length for an `Addr`. 
-pub const ADDR_LENGTH: usize = 32; -const PADDING_BYTES: [u8; 32] = [0u8; 32]; -const KEY_ID_SERIALIZED_LENGTH: usize = 1; -// u8 used to determine the ID -const KEY_HASH_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_UREF_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + UREF_SERIALIZED_LENGTH; -const KEY_TRANSFER_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_TRANSFER_LENGTH; -const KEY_DEPLOY_INFO_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_DEPLOY_INFO_LENGTH; -const KEY_ERA_INFO_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + U64_SERIALIZED_LENGTH; -const KEY_BALANCE_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + UREF_ADDR_LENGTH; -const KEY_BID_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_WITHDRAW_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_UNBOND_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_HASH_LENGTH; -const KEY_DICTIONARY_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + KEY_DICTIONARY_LENGTH; -const KEY_SYSTEM_CONTRACT_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_ERA_SUMMARY_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_CHAINSPEC_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_CHECKSUM_REGISTRY_SERIALIZED_LENGTH: usize = - KEY_ID_SERIALIZED_LENGTH + PADDING_BYTES.len(); -const KEY_PACKAGE_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH + 32; -const KEY_MESSAGE_SERIALIZED_LENGTH: usize = KEY_ID_SERIALIZED_LENGTH - + KEY_HASH_LENGTH - + TOPIC_NAME_HASH_LENGTH - + U8_SERIALIZED_LENGTH - + U32_SERIALIZED_LENGTH; - -const MAX_SERIALIZED_LENGTH: usize = KEY_MESSAGE_SERIALIZED_LENGTH; - -/// An alias for [`Key`]s hash variant. -pub type HashAddr = [u8; KEY_HASH_LENGTH]; - -/// An alias for [`Key`]s package variant. -pub type PackageAddr = [u8; ADDR_LENGTH]; - -/// An alias for [`Key`]s entity variant. -pub type EntityAddr = [u8; ADDR_LENGTH]; - -/// An alias for [`Key`]s byte code variant. -pub type ByteCodeAddr = [u8; ADDR_LENGTH]; - -/// An alias for [`Key`]s dictionary variant. 
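// Worked size check (illustrative): a `Key::Message` serializes as a 1-byte key tag,
// a 32-byte entity hash addr, a 32-byte topic name hash (assuming TOPIC_NAME_HASH_LENGTH
// is 32), a 1-byte discriminant and a 4-byte u32 message index, so
// KEY_MESSAGE_SERIALIZED_LENGTH = 1 + 32 + 32 + 1 + 4 = 70 bytes, which is why
// MAX_SERIALIZED_LENGTH is defined in terms of the Message variant.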
-pub type DictionaryAddr = [u8; KEY_DICTIONARY_LENGTH]; - -#[allow(missing_docs)] -#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] -#[repr(u8)] -pub enum KeyTag { - Account = 0, - Hash = 1, - URef = 2, - Transfer = 3, - DeployInfo = 4, - EraInfo = 5, - Balance = 6, - Bid = 7, - Withdraw = 8, - Dictionary = 9, - SystemContractRegistry = 10, - EraSummary = 11, - Unbond = 12, - ChainspecRegistry = 13, - ChecksumRegistry = 14, - BidAddr = 15, - Package = 16, - AddressableEntity = 17, - ByteCode = 18, - Message = 19, -} - -impl KeyTag { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..20) { - 0 => KeyTag::Account, - 1 => KeyTag::Hash, - 2 => KeyTag::URef, - 3 => KeyTag::Transfer, - 4 => KeyTag::DeployInfo, - 5 => KeyTag::EraInfo, - 6 => KeyTag::Balance, - 7 => KeyTag::Bid, - 8 => KeyTag::Withdraw, - 9 => KeyTag::Dictionary, - 10 => KeyTag::SystemContractRegistry, - 11 => KeyTag::EraSummary, - 12 => KeyTag::Unbond, - 13 => KeyTag::ChainspecRegistry, - 14 => KeyTag::ChecksumRegistry, - 15 => KeyTag::BidAddr, - 16 => KeyTag::Package, - 17 => KeyTag::AddressableEntity, - 18 => KeyTag::ByteCode, - 19 => KeyTag::Message, - _ => panic!(), - } - } -} - -impl Display for KeyTag { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - KeyTag::Account => write!(f, "Account"), - KeyTag::Hash => write!(f, "Hash"), - KeyTag::URef => write!(f, "URef"), - KeyTag::Transfer => write!(f, "Transfer"), - KeyTag::DeployInfo => write!(f, "DeployInfo"), - KeyTag::EraInfo => write!(f, "EraInfo"), - KeyTag::Balance => write!(f, "Balance"), - KeyTag::Bid => write!(f, "Bid"), - KeyTag::Withdraw => write!(f, "Withdraw"), - KeyTag::Dictionary => write!(f, "Dictionary"), - KeyTag::SystemContractRegistry => write!(f, "SystemContractRegistry"), - KeyTag::EraSummary => write!(f, "EraSummary"), - KeyTag::Unbond => write!(f, "Unbond"), - KeyTag::ChainspecRegistry => write!(f, "ChainspecRegistry"), - KeyTag::ChecksumRegistry => write!(f, "ChecksumRegistry"), - KeyTag::BidAddr => write!(f, "BidAddr"), - KeyTag::Package => write!(f, "Package"), - KeyTag::AddressableEntity => write!(f, "AddressableEntity"), - KeyTag::ByteCode => write!(f, "ByteCode"), - KeyTag::Message => write!(f, "Message"), - } - } -} - -impl ToBytes for KeyTag { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - KEY_ID_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.push(*self as u8); - Ok(()) - } -} - -impl FromBytes for KeyTag { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (id, rem) = u8::from_bytes(bytes)?; - let tag = match id { - tag if tag == KeyTag::Account as u8 => KeyTag::Account, - tag if tag == KeyTag::Hash as u8 => KeyTag::Hash, - tag if tag == KeyTag::URef as u8 => KeyTag::URef, - tag if tag == KeyTag::Transfer as u8 => KeyTag::Transfer, - tag if tag == KeyTag::DeployInfo as u8 => KeyTag::DeployInfo, - tag if tag == KeyTag::EraInfo as u8 => KeyTag::EraInfo, - tag if tag == KeyTag::Balance as u8 => KeyTag::Balance, - tag if tag == KeyTag::Bid as u8 => KeyTag::Bid, - tag if tag == KeyTag::Withdraw as u8 => KeyTag::Withdraw, - tag if tag == KeyTag::Dictionary as u8 => KeyTag::Dictionary, - tag if tag == KeyTag::SystemContractRegistry as u8 => KeyTag::SystemContractRegistry, - tag if tag == KeyTag::EraSummary as u8 => KeyTag::EraSummary, - tag if tag == 
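// A round-trip sketch for the tag serialization defined here (illustrative):
//
// let bytes = KeyTag::Dictionary.to_bytes().unwrap();
// assert_eq!(bytes, vec![9u8]);                  // a single tag byte (Dictionary = 9)
// let (tag, remainder) = KeyTag::from_bytes(&bytes).unwrap();
// assert_eq!(tag, KeyTag::Dictionary);
// assert!(remainder.is_empty());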
KeyTag::Unbond as u8 => KeyTag::Unbond, - tag if tag == KeyTag::ChainspecRegistry as u8 => KeyTag::ChainspecRegistry, - tag if tag == KeyTag::ChecksumRegistry as u8 => KeyTag::ChecksumRegistry, - tag if tag == KeyTag::BidAddr as u8 => KeyTag::BidAddr, - tag if tag == KeyTag::Package as u8 => KeyTag::Package, - tag if tag == KeyTag::AddressableEntity as u8 => KeyTag::AddressableEntity, - tag if tag == KeyTag::ByteCode as u8 => KeyTag::ByteCode, - tag if tag == KeyTag::Message as u8 => KeyTag::Message, - _ => return Err(Error::Formatting), - }; - Ok((tag, rem)) - } -} - -/// The key under which data (e.g. [`CLValue`]s, smart contracts, user accounts) are stored in -/// global state. -#[repr(C)] -#[derive(PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum Key { - /// A `Key` under which a user account is stored. - Account(AccountHash), - /// A `Key` under which a smart contract is stored and which is the pseudo-hash of the - /// contract. - Hash(HashAddr), - /// A `Key` which is a [`URef`], under which most types of data can be stored. - URef(URef), - /// A `Key` under which a transfer is stored. - Transfer(TransferAddr), - /// A `Key` under which a deploy info is stored. - DeployInfo(DeployHash), - /// A `Key` under which an era info is stored. - EraInfo(EraId), - /// A `Key` under which a purse balance is stored. - Balance(URefAddr), - /// A `Key` under which bid information is stored. - Bid(AccountHash), - /// A `Key` under which withdraw information is stored. - Withdraw(AccountHash), - /// A `Key` whose value is derived by hashing a [`URef`] address and arbitrary data, under - /// which a dictionary is stored. - Dictionary(DictionaryAddr), - /// A `Key` under which system contract hashes are stored. - SystemContractRegistry, - /// A `Key` under which current era info is stored. - EraSummary, - /// A `Key` under which unbond information is stored. - Unbond(AccountHash), - /// A `Key` under which chainspec and other hashes are stored. - ChainspecRegistry, - /// A `Key` under which a registry of checksums is stored. - ChecksumRegistry, - /// A `Key` under which bid information is stored. - BidAddr(BidAddr), - /// A `Key` under which package information is stored. - Package(PackageAddr), - /// A `Key` under which an addressable entity is stored. - AddressableEntity(PackageKindTag, EntityAddr), - /// A `Key` under which a byte code record is stored. - ByteCode(ByteCodeKind, ByteCodeAddr), - /// A `Key` under which a message is stored. - Message(MessageAddr), -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for Key { - fn schema_name() -> String { - String::from("Key") - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some( - "The key as a formatted string, under which data (e.g. `CLValue`s, smart contracts, \ - user accounts) are stored in global state." - .to_string(), - ); - schema_object.into() - } -} - -/// Errors produced when converting a `String` into a `Key`. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Account parse error. - Account(addressable_entity::FromStrError), - /// Hash parse error. - Hash(String), - /// URef parse error. - URef(uref::FromStrError), - /// Transfer parse error. - Transfer(TransferFromStrError), - /// DeployInfo parse error. - DeployInfo(String), - /// EraInfo parse error. 
- EraInfo(String), - /// Balance parse error. - Balance(String), - /// Bid parse error. - Bid(String), - /// Withdraw parse error. - Withdraw(String), - /// Dictionary parse error. - Dictionary(String), - /// System contract registry parse error. - SystemContractRegistry(String), - /// Era summary parse error. - EraSummary(String), - /// Unbond parse error. - Unbond(String), - /// Chainspec registry error. - ChainspecRegistry(String), - /// Checksum registry error. - ChecksumRegistry(String), - /// Bid parse error. - BidAddr(String), - /// Package parse error. - Package(String), - /// Entity parse error. - AddressableEntity(String), - /// Byte code parse error. - ByteCode(String), - /// Message parse error. - Message(contract_messages::FromStrError), - /// Unknown prefix. - UnknownPrefix, -} - -impl From for FromStrError { - fn from(error: addressable_entity::FromStrError) -> Self { - FromStrError::Account(error) - } -} - -impl From for FromStrError { - fn from(error: TransferFromStrError) -> Self { - FromStrError::Transfer(error) - } -} - -impl From for FromStrError { - fn from(error: uref::FromStrError) -> Self { - FromStrError::URef(error) - } -} - -impl From for FromStrError { - fn from(error: contract_messages::FromStrError) -> Self { - FromStrError::Message(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::Account(error) => write!(f, "account-key from string error: {}", error), - FromStrError::Hash(error) => write!(f, "hash-key from string error: {}", error), - FromStrError::URef(error) => write!(f, "uref-key from string error: {}", error), - FromStrError::Transfer(error) => write!(f, "transfer-key from string error: {}", error), - FromStrError::DeployInfo(error) => { - write!(f, "deploy-info-key from string error: {}", error) - } - FromStrError::EraInfo(error) => write!(f, "era-info-key from string error: {}", error), - FromStrError::Balance(error) => write!(f, "balance-key from string error: {}", error), - FromStrError::Bid(error) => write!(f, "bid-key from string error: {}", error), - FromStrError::Withdraw(error) => write!(f, "withdraw-key from string error: {}", error), - FromStrError::Dictionary(error) => { - write!(f, "dictionary-key from string error: {}", error) - } - FromStrError::SystemContractRegistry(error) => { - write!( - f, - "system-contract-registry-key from string error: {}", - error - ) - } - FromStrError::EraSummary(error) => { - write!(f, "era-summary-key from string error: {}", error) - } - FromStrError::Unbond(error) => { - write!(f, "unbond-key from string error: {}", error) - } - FromStrError::ChainspecRegistry(error) => { - write!(f, "chainspec-registry-key from string error: {}", error) - } - FromStrError::ChecksumRegistry(error) => { - write!(f, "checksum-registry-key from string error: {}", error) - } - FromStrError::BidAddr(error) => write!(f, "bid-addr-key from string error: {}", error), - FromStrError::Package(error) => write!(f, "package-key from string error: {}", error), - FromStrError::AddressableEntity(error) => { - write!(f, "addressable-entity-key from string error: {}", error) - } - FromStrError::ByteCode(error) => { - write!(f, "byte-code-key from string error: {}", error) - } - FromStrError::Message(error) => { - write!(f, "message-key from string error: {}", error) - } - FromStrError::UnknownPrefix => write!(f, "unknown prefix for key"), - } - } -} - -impl Key { - // This method is not intended to be used by third party crates. 
- #[doc(hidden)] - pub fn type_string(&self) -> String { - match self { - Key::Account(_) => String::from("Key::Account"), - Key::Hash(_) => String::from("Key::Hash"), - Key::URef(_) => String::from("Key::URef"), - Key::Transfer(_) => String::from("Key::Transfer"), - Key::DeployInfo(_) => String::from("Key::DeployInfo"), - Key::EraInfo(_) => String::from("Key::EraInfo"), - Key::Balance(_) => String::from("Key::Balance"), - Key::Bid(_) => String::from("Key::Bid"), - Key::Withdraw(_) => String::from("Key::Unbond"), - Key::Dictionary(_) => String::from("Key::Dictionary"), - Key::SystemContractRegistry => String::from("Key::SystemContractRegistry"), - Key::EraSummary => String::from("Key::EraSummary"), - Key::Unbond(_) => String::from("Key::Unbond"), - Key::ChainspecRegistry => String::from("Key::ChainspecRegistry"), - Key::ChecksumRegistry => String::from("Key::ChecksumRegistry"), - Key::BidAddr(_) => String::from("Key::BidAddr"), - Key::Package(_) => String::from("Key::Package"), - Key::AddressableEntity(..) => String::from("Key::AddressableEntity"), - Key::ByteCode(..) => String::from("Key::ByteCode"), - Key::Message(_) => String::from("Key::Message"), - } - } - - /// Returns the maximum size a [`Key`] can be serialized into. - pub const fn max_serialized_length() -> usize { - MAX_SERIALIZED_LENGTH - } - - /// If `self` is of type [`Key::URef`], returns `self` with the - /// [`AccessRights`](crate::AccessRights) stripped from the wrapped [`URef`], otherwise - /// returns `self` unmodified. - #[must_use] - pub fn normalize(self) -> Key { - match self { - Key::URef(uref) => Key::URef(uref.remove_access_rights()), - other => other, - } - } - - /// Returns a human-readable version of `self`, with the inner bytes encoded to Base16. - pub fn to_formatted_string(self) -> String { - match self { - Key::Account(account_hash) => account_hash.to_formatted_string(), - Key::Hash(addr) => format!("{}{}", HASH_PREFIX, base16::encode_lower(&addr)), - Key::URef(uref) => uref.to_formatted_string(), - Key::Transfer(transfer_addr) => transfer_addr.to_formatted_string(), - Key::DeployInfo(addr) => { - format!( - "{}{}", - DEPLOY_INFO_PREFIX, - base16::encode_lower(addr.as_ref()) - ) - } - Key::EraInfo(era_id) => { - format!("{}{}", ERA_INFO_PREFIX, era_id.value()) - } - Key::Balance(uref_addr) => { - format!("{}{}", BALANCE_PREFIX, base16::encode_lower(&uref_addr)) - } - Key::Bid(account_hash) => { - format!("{}{}", BID_PREFIX, base16::encode_lower(&account_hash)) - } - Key::Withdraw(account_hash) => { - format!("{}{}", WITHDRAW_PREFIX, base16::encode_lower(&account_hash)) - } - Key::Dictionary(dictionary_addr) => { - format!( - "{}{}", - DICTIONARY_PREFIX, - base16::encode_lower(&dictionary_addr) - ) - } - Key::SystemContractRegistry => { - format!( - "{}{}", - SYSTEM_CONTRACT_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::EraSummary => { - format!( - "{}{}", - ERA_SUMMARY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::Unbond(account_hash) => { - format!("{}{}", UNBOND_PREFIX, base16::encode_lower(&account_hash)) - } - Key::ChainspecRegistry => { - format!( - "{}{}", - CHAINSPEC_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::ChecksumRegistry => { - format!( - "{}{}", - CHECKSUM_REGISTRY_PREFIX, - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::BidAddr(bid_addr) => { - format!("{}{}", BID_ADDR_PREFIX, bid_addr) - } - Key::Message(message_addr) => message_addr.to_formatted_string(), - Key::Package(package_addr) => { - format!("{}{}", 
PACKAGE_PREFIX, base16::encode_lower(&package_addr)) - } - Key::AddressableEntity(package_tag, entity_addr) => match package_tag { - PackageKindTag::System => { - format!( - "{}{}{}", - ENTITY_PREFIX, - SYSTEM_ENTITY_PREFIX, - base16::encode_lower(&entity_addr) - ) - } - PackageKindTag::Account => { - format!( - "{}{}{}", - ENTITY_PREFIX, - ACCOUNT_ENTITY_PREFIX, - base16::encode_lower(&entity_addr) - ) - } - PackageKindTag::SmartContract => { - format!( - "{}{}{}", - ENTITY_PREFIX, - CONTRACT_ENTITY_PREFIX, - base16::encode_lower(&entity_addr) - ) - } - }, - Key::ByteCode(byte_code_kind, byte_code_addr) => match byte_code_kind { - ByteCodeKind::Empty => { - format!( - "{}{}{}", - BYTE_CODE_PREFIX, - EMPTY_PREFIX, - base16::encode_lower(&byte_code_addr) - ) - } - ByteCodeKind::V1CasperWasm => { - format!( - "{}{}{}", - BYTE_CODE_PREFIX, - V1_WASM_PREFIX, - base16::encode_lower(&byte_code_addr) - ) - } - }, - } - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `Key`. - pub fn from_formatted_str(input: &str) -> Result { - match AccountHash::from_formatted_str(input) { - Ok(account_hash) => return Ok(Key::Account(account_hash)), - Err(addressable_entity::FromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - if let Some(hex) = input.strip_prefix(HASH_PREFIX) { - let addr = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Hash(error.to_string()))?; - let hash_addr = HashAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::Hash(error.to_string()))?; - return Ok(Key::Hash(hash_addr)); - } - - if let Some(hex) = input.strip_prefix(DEPLOY_INFO_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::DeployInfo(error.to_string()))?; - let hash_array = <[u8; DeployHash::LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::DeployInfo(error.to_string()))?; - return Ok(Key::DeployInfo(DeployHash::new(Digest::from(hash_array)))); - } - - match TransferAddr::from_formatted_str(input) { - Ok(transfer_addr) => return Ok(Key::Transfer(transfer_addr)), - Err(TransferFromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - match URef::from_formatted_str(input) { - Ok(uref) => return Ok(Key::URef(uref)), - Err(uref::FromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - if let Some(era_summary_padding) = input.strip_prefix(ERA_SUMMARY_PREFIX) { - let padded_bytes = checksummed_hex::decode(era_summary_padding) - .map_err(|error| FromStrError::EraSummary(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::EraSummary("Failed to deserialize era summary key".to_string()) - })?; - return Ok(Key::EraSummary); - } - - if let Some(era_id_str) = input.strip_prefix(ERA_INFO_PREFIX) { - let era_id = EraId::from_str(era_id_str) - .map_err(|error| FromStrError::EraInfo(error.to_string()))?; - return Ok(Key::EraInfo(era_id)); - } - - if let Some(hex) = input.strip_prefix(BALANCE_PREFIX) { - let addr = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Balance(error.to_string()))?; - let uref_addr = URefAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::Balance(error.to_string()))?; - return Ok(Key::Balance(uref_addr)); - } - - // note: BID_ADDR must come before BID as their heads overlap (bid- / bid-addr-) - if let Some(hex) = input.strip_prefix(BID_ADDR_PREFIX) { - let bytes = checksummed_hex::decode(hex) - .map_err(|error| 
FromStrError::BidAddr(error.to_string()))?; - if bytes.is_empty() { - return Err(FromStrError::BidAddr( - "bytes should not be 0 len".to_string(), - )); - } - let tag_bytes = <[u8; BidAddrTag::BID_ADDR_TAG_LENGTH]>::try_from(bytes[0..1].as_ref()) - .map_err(|err| FromStrError::BidAddr(err.to_string()))?; - let tag = BidAddrTag::try_from_u8(tag_bytes[0]) - .ok_or_else(|| FromStrError::BidAddr("failed to parse bid addr tag".to_string()))?; - let validator_bytes = <[u8; ACCOUNT_HASH_LENGTH]>::try_from( - bytes[1..BidAddr::VALIDATOR_BID_ADDR_LENGTH].as_ref(), - ) - .map_err(|err| FromStrError::BidAddr(err.to_string()))?; - - let bid_addr = { - if tag == BidAddrTag::Unified { - BidAddr::legacy(validator_bytes) - } else if tag == BidAddrTag::Validator { - BidAddr::new_validator_addr(validator_bytes) - } else if tag == BidAddrTag::Delegator { - let delegator_bytes = <[u8; ACCOUNT_HASH_LENGTH]>::try_from( - bytes[BidAddr::VALIDATOR_BID_ADDR_LENGTH..].as_ref(), - ) - .map_err(|err| FromStrError::BidAddr(err.to_string()))?; - BidAddr::new_delegator_addr((validator_bytes, delegator_bytes)) - } else { - return Err(FromStrError::BidAddr("invalid tag".to_string())); - } - }; - return Ok(Key::BidAddr(bid_addr)); - } - - if let Some(hex) = input.strip_prefix(BID_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Bid(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Bid(error.to_string()))?; - return Ok(Key::Bid(AccountHash::new(account_hash))); - } - - if let Some(hex) = input.strip_prefix(WITHDRAW_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Withdraw(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Withdraw(error.to_string()))?; - return Ok(Key::Withdraw(AccountHash::new(account_hash))); - } - - if let Some(hex) = input.strip_prefix(UNBOND_PREFIX) { - let hash = checksummed_hex::decode(hex) - .map_err(|error| FromStrError::Unbond(error.to_string()))?; - let account_hash = <[u8; ACCOUNT_HASH_LENGTH]>::try_from(hash.as_ref()) - .map_err(|error| FromStrError::Unbond(error.to_string()))?; - return Ok(Key::Unbond(AccountHash::new(account_hash))); - } - - if let Some(dictionary_addr) = input.strip_prefix(DICTIONARY_PREFIX) { - let dictionary_addr_bytes = checksummed_hex::decode(dictionary_addr) - .map_err(|error| FromStrError::Dictionary(error.to_string()))?; - let addr = DictionaryAddr::try_from(dictionary_addr_bytes.as_ref()) - .map_err(|error| FromStrError::Dictionary(error.to_string()))?; - return Ok(Key::Dictionary(addr)); - } - - if let Some(registry_address) = input.strip_prefix(SYSTEM_CONTRACT_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::SystemContractRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::SystemContractRegistry( - "Failed to deserialize system registry key".to_string(), - ) - })?; - return Ok(Key::SystemContractRegistry); - } - - if let Some(registry_address) = input.strip_prefix(CHAINSPEC_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::ChainspecRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::ChainspecRegistry( - "Failed to deserialize chainspec registry 
key".to_string(), - ) - })?; - return Ok(Key::ChainspecRegistry); - } - - if let Some(registry_address) = input.strip_prefix(CHECKSUM_REGISTRY_PREFIX) { - let padded_bytes = checksummed_hex::decode(registry_address) - .map_err(|error| FromStrError::ChecksumRegistry(error.to_string()))?; - let _padding: [u8; 32] = TryFrom::try_from(padded_bytes.as_ref()).map_err(|_| { - FromStrError::ChecksumRegistry( - "Failed to deserialize checksum registry key".to_string(), - ) - })?; - return Ok(Key::ChecksumRegistry); - } - - if let Some(package_addr) = input.strip_prefix(PACKAGE_PREFIX) { - let package_addr_bytes = checksummed_hex::decode(package_addr) - .map_err(|error| FromStrError::Dictionary(error.to_string()))?; - let addr = PackageAddr::try_from(package_addr_bytes.as_ref()) - .map_err(|error| FromStrError::Package(error.to_string()))?; - return Ok(Key::Package(addr)); - } - - if let Some(entity) = input.strip_prefix(ENTITY_PREFIX) { - let (addr_str, tag) = if let Some(str) = entity.strip_prefix(ACCOUNT_ENTITY_PREFIX) { - (str, PackageKindTag::Account) - } else if let Some(str) = entity.strip_prefix(SYSTEM_ENTITY_PREFIX) { - (str, PackageKindTag::System) - } else if let Some(str) = entity.strip_prefix(CONTRACT_ENTITY_PREFIX) { - (str, PackageKindTag::SmartContract) - } else { - return Err(FromStrError::UnknownPrefix); - }; - let addr = checksummed_hex::decode(addr_str) - .map_err(|error| FromStrError::AddressableEntity(error.to_string()))?; - let entity_addr = EntityAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::AddressableEntity(error.to_string()))?; - return Ok(Key::AddressableEntity(tag, entity_addr)); - } - - if let Some(byte_code) = input.strip_prefix(BYTE_CODE_PREFIX) { - let (addr_str, tag) = if let Some(str) = byte_code.strip_prefix(EMPTY_PREFIX) { - (str, ByteCodeKind::Empty) - } else if let Some(str) = byte_code.strip_prefix(V1_WASM_PREFIX) { - (str, ByteCodeKind::V1CasperWasm) - } else { - return Err(FromStrError::UnknownPrefix); - }; - let addr = checksummed_hex::decode(addr_str) - .map_err(|error| FromStrError::ByteCode(error.to_string()))?; - let byte_code_addr = ByteCodeAddr::try_from(addr.as_ref()) - .map_err(|error| FromStrError::ByteCode(error.to_string()))?; - return Ok(Key::ByteCode(tag, byte_code_addr)); - } - - match MessageAddr::from_formatted_str(input) { - Ok(message_addr) => return Ok(Key::Message(message_addr)), - Err(contract_messages::FromStrError::InvalidPrefix) => {} - Err(error) => return Err(error.into()), - } - - Err(FromStrError::UnknownPrefix) - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::Account`], otherwise returns - /// `None`. - pub fn into_account(self) -> Option { - match self { - Key::Account(bytes) => Some(bytes), - _ => None, - } - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::Hash`], otherwise returns - /// `None`. - pub fn into_hash_addr(self) -> Option { - match self { - Key::Hash(hash) => Some(hash), - _ => None, - } - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::AddressableEntity`], otherwise - /// returns `None`. - pub fn into_entity_addr(self) -> Option { - match self { - Key::AddressableEntity(_, hash) => Some(hash), - _ => None, - } - } - - /// Returns the inner bytes of `self` if `self` is of type [`Key::Package`], otherwise returns - /// `None`. 
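// A formatted-string round-trip sketch for the parsing above (illustrative; the byte
// values are arbitrary):
//
// let key = Key::Hash([42u8; 32]);
// let text = key.to_formatted_string();
// assert!(text.starts_with("hash-"));            // HASH_PREFIX plus lower-case base16
// assert_eq!(Key::from_formatted_str(&text).unwrap(), key);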
-    pub fn into_package_addr(self) -> Option<PackageAddr> {
-        match self {
-            Key::Package(package_addr) => Some(package_addr),
-            _ => None,
-        }
-    }
-
-    /// Returns [`AddressableEntityHash`] of `self` if `self` is of type
-    /// [`Key::AddressableEntity`], otherwise returns `None`.
-    pub fn into_entity_hash(self) -> Option<AddressableEntityHash> {
-        let entity_addr = self.into_entity_addr()?;
-        Some(AddressableEntityHash::new(entity_addr))
-    }
-
-    /// Returns [`PackageHash`] of `self` if `self` is of type [`Key::Package`], otherwise
-    /// returns `None`.
-    pub fn into_package_hash(self) -> Option<PackageHash> {
-        let package_addr = self.into_package_addr()?;
-        Some(PackageHash::new(package_addr))
-    }
-
-    /// Returns a reference to the inner [`URef`] if `self` is of type [`Key::URef`], otherwise
-    /// returns `None`.
-    pub fn as_uref(&self) -> Option<&URef> {
-        match self {
-            Key::URef(uref) => Some(uref),
-            _ => None,
-        }
-    }
-
-    /// Returns a mutable reference to the inner [`URef`] if `self` is of type [`Key::URef`],
-    /// otherwise returns `None`.
-    pub fn as_uref_mut(&mut self) -> Option<&mut URef> {
-        match self {
-            Key::URef(uref) => Some(uref),
-            _ => None,
-        }
-    }
-
-    /// Returns a reference to the inner `URefAddr` if `self` is of type [`Key::Balance`],
-    /// otherwise returns `None`.
-    pub fn as_balance(&self) -> Option<&URefAddr> {
-        if let Self::Balance(v) = self {
-            Some(v)
-        } else {
-            None
-        }
-    }
-
-    /// Returns the inner [`URef`] if `self` is of type [`Key::URef`], otherwise returns `None`.
-    pub fn into_uref(self) -> Option<URef> {
-        match self {
-            Key::URef(uref) => Some(uref),
-            _ => None,
-        }
-    }
-
-    /// Returns a reference to the inner [`DictionaryAddr`] if `self` is of type
-    /// [`Key::Dictionary`], otherwise returns `None`.
-    pub fn as_dictionary(&self) -> Option<&DictionaryAddr> {
-        match self {
-            Key::Dictionary(v) => Some(v),
-            _ => None,
-        }
-    }
-
-    /// Casts a [`Key::URef`] to a [`Key::Hash`].
-    pub fn uref_to_hash(&self) -> Option<Key> {
-        let uref = self.as_uref()?;
-        let addr = uref.addr();
-        Some(Key::Hash(addr))
-    }
-
-    /// Casts a [`Key::Withdraw`] to a [`Key::Unbond`].
-    pub fn withdraw_to_unbond(&self) -> Option<Key> {
-        if let Key::Withdraw(account_hash) = self {
-            return Some(Key::Unbond(*account_hash));
-        }
-        None
-    }
-
-    /// Creates a new [`Key::Dictionary`] variant based on a `seed_uref` and `dictionary_item_key`
-    /// bytes.
-    pub fn dictionary(seed_uref: URef, dictionary_item_key: &[u8]) -> Key {
-        // NOTE: Expect below is safe because the length passed is supported.
-        let mut hasher = VarBlake2b::new(BLAKE2B_DIGEST_LENGTH).expect("should create hasher");
-        hasher.update(seed_uref.addr().as_ref());
-        hasher.update(dictionary_item_key);
-        // NOTE: Assumed safe as size of `HashAddr` equals the output provided by the hasher.
-        let mut addr = HashAddr::default();
-        hasher.finalize_variable(|hash| addr.clone_from_slice(hash));
-        Key::Dictionary(addr)
-    }
-
-    /// Creates a new [`Key::AddressableEntity`] variant from a package kind and an entity
-    /// hash.
-    pub fn addressable_entity_key(
-        package_kind_tag: PackageKindTag,
-        entity_hash: AddressableEntityHash,
-    ) -> Self {
-        Key::AddressableEntity(package_kind_tag, entity_hash.value())
-    }
-
-    /// Creates a new [`Key::AddressableEntity`] for a smart contract.
-    pub fn contract_entity_key(entity_hash: AddressableEntityHash) -> Key {
-        Self::addressable_entity_key(PackageKindTag::SmartContract, entity_hash)
-    }
-
-    /// Creates a new [`Key::ByteCode`] variant from a byte code kind and a byte code addr.
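// A sketch of deriving a dictionary key under a seed URef via `Key::dictionary`
// (illustrative; the URef address and item key are made up):
//
// let seed_uref = URef::new([1u8; 32], AccessRights::READ_ADD_WRITE);
// let dictionary_key = Key::dictionary(seed_uref, "my-dictionary-item".as_bytes());
// assert!(dictionary_key.is_dictionary_key());   // the result is always Key::Dictionary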
-    pub fn byte_code_key(byte_code_kind: ByteCodeKind, byte_code_addr: ByteCodeAddr) -> Self {
-        Key::ByteCode(byte_code_kind, byte_code_addr)
-    }
-
-    /// Creates a new [`Key::Message`] variant that identifies an indexed message based on an
-    /// `entity_addr`, `topic_name_hash` and message `index`.
-    pub fn message(
-        entity_addr: AddressableEntityHash,
-        topic_name_hash: TopicNameHash,
-        index: u32,
-    ) -> Key {
-        Key::Message(MessageAddr::new_message_addr(
-            entity_addr,
-            topic_name_hash,
-            index,
-        ))
-    }
-
-    /// Creates a new [`Key::Message`] variant that identifies a message topic based on an
-    /// `entity_addr` and a hash of the topic name.
-    pub fn message_topic(
-        entity_addr: AddressableEntityHash,
-        topic_name_hash: TopicNameHash,
-    ) -> Key {
-        Key::Message(MessageAddr::new_topic_addr(entity_addr, topic_name_hash))
-    }
-
-    /// Returns true if the key is of type [`Key::Dictionary`].
-    pub fn is_dictionary_key(&self) -> bool {
-        if let Key::Dictionary(_) = self {
-            return true;
-        }
-        false
-    }
-
-    /// Returns true if the key is of type [`Key::Balance`].
-    pub fn is_balance_key(&self) -> bool {
-        if let Key::Balance(_) = self {
-            return true;
-        }
-        false
-    }
-
-    /// Returns true if the key is of type [`Key::BidAddr`].
-    pub fn is_bid_addr_key(&self) -> bool {
-        if let Key::BidAddr(_) = self {
-            return true;
-        }
-        false
-    }
-
-    /// Returns a reference to the inner `BidAddr` if `self` is of type [`Key::BidAddr`],
-    /// otherwise returns `None`.
-    pub fn as_bid_addr(&self) -> Option<&BidAddr> {
-        if let Self::BidAddr(addr) = self {
-            Some(addr)
-        } else {
-            None
-        }
-    }
-
-    /// Returns true if the inner Key is for a system contract entity.
-    pub fn is_system_key(&self) -> bool {
-        if let Self::AddressableEntity(PackageKindTag::System, _) = self {
-            return true;
-        }
-
-        false
-    }
-
-    /// Returns true if the inner Key is of the smart contract type.
- pub fn is_smart_contract_key(&self) -> bool { - if let Self::AddressableEntity(PackageKindTag::SmartContract, _) = self { - return true; - } - - false - } -} - -impl Display for Key { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - Key::Account(account_hash) => write!(f, "Key::Account({})", account_hash), - Key::Hash(addr) => write!(f, "Key::Hash({})", base16::encode_lower(&addr)), - Key::URef(uref) => write!(f, "Key::{}", uref), /* Display impl for URef will append */ - Key::Transfer(transfer_addr) => write!(f, "Key::Transfer({})", transfer_addr), - Key::DeployInfo(addr) => write!( - f, - "Key::DeployInfo({})", - base16::encode_lower(addr.as_ref()) - ), - Key::EraInfo(era_id) => write!(f, "Key::EraInfo({})", era_id), - Key::Balance(uref_addr) => { - write!(f, "Key::Balance({})", base16::encode_lower(uref_addr)) - } - Key::Bid(account_hash) => write!(f, "Key::Bid({})", account_hash), - Key::Withdraw(account_hash) => write!(f, "Key::Withdraw({})", account_hash), - Key::Dictionary(addr) => { - write!(f, "Key::Dictionary({})", base16::encode_lower(addr)) - } - Key::SystemContractRegistry => write!( - f, - "Key::SystemContractRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ), - Key::EraSummary => write!( - f, - "Key::EraSummary({})", - base16::encode_lower(&PADDING_BYTES), - ), - Key::Unbond(account_hash) => write!(f, "Key::Unbond({})", account_hash), - Key::ChainspecRegistry => write!( - f, - "Key::ChainspecRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ), - Key::ChecksumRegistry => { - write!( - f, - "Key::ChecksumRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - } - Key::BidAddr(bid_addr) => write!(f, "Key::BidAddr({})", bid_addr), - Key::Message(message_addr) => { - write!(f, "Key::Message({})", message_addr) - } - Key::Package(package_addr) => { - write!(f, "Key::Package({})", base16::encode_lower(package_addr)) - } - Key::AddressableEntity(kind_tag, entity_addr) => write!( - f, - "Key::AddressableEntity({}-{})", - kind_tag, - base16::encode_lower(entity_addr) - ), - Key::ByteCode(kind, byte_code_addr) => { - write!( - f, - "Key::ByteCode({}-{})", - kind, - base16::encode_lower(byte_code_addr) - ) - } - } - } -} - -impl Debug for Key { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", self) - } -} - -impl Tagged for Key { - fn tag(&self) -> KeyTag { - match self { - Key::Account(_) => KeyTag::Account, - Key::Hash(_) => KeyTag::Hash, - Key::URef(_) => KeyTag::URef, - Key::Transfer(_) => KeyTag::Transfer, - Key::DeployInfo(_) => KeyTag::DeployInfo, - Key::EraInfo(_) => KeyTag::EraInfo, - Key::Balance(_) => KeyTag::Balance, - Key::Bid(_) => KeyTag::Bid, - Key::Withdraw(_) => KeyTag::Withdraw, - Key::Dictionary(_) => KeyTag::Dictionary, - Key::SystemContractRegistry => KeyTag::SystemContractRegistry, - Key::EraSummary => KeyTag::EraSummary, - Key::Unbond(_) => KeyTag::Unbond, - Key::ChainspecRegistry => KeyTag::ChainspecRegistry, - Key::ChecksumRegistry => KeyTag::ChecksumRegistry, - Key::BidAddr(_) => KeyTag::BidAddr, - Key::Package(_) => KeyTag::Package, - Key::AddressableEntity(..) => KeyTag::AddressableEntity, - Key::ByteCode(..) 
=> KeyTag::ByteCode, - Key::Message(_) => KeyTag::Message, - } - } -} - -impl Tagged for Key { - fn tag(&self) -> u8 { - let key_tag: KeyTag = self.tag(); - key_tag as u8 - } -} - -impl From for Key { - fn from(uref: URef) -> Key { - Key::URef(uref) - } -} - -impl From for Key { - fn from(account_hash: AccountHash) -> Key { - Key::Account(account_hash) - } -} - -impl From for Key { - fn from(transfer_addr: TransferAddr) -> Key { - Key::Transfer(transfer_addr) - } -} - -impl From for Key { - fn from(package_hash: PackageHash) -> Key { - Key::Package(package_hash.value()) - } -} - -impl From for Key { - fn from(wasm_hash: ContractWasmHash) -> Self { - Key::Hash(wasm_hash.value()) - } -} - -impl From for Key { - fn from(contract_package_hash: ContractPackageHash) -> Self { - Key::Hash(contract_package_hash.value()) - } -} - -impl From for Key { - fn from(contract_hash: ContractHash) -> Self { - Key::Hash(contract_hash.value()) - } -} - -impl ToBytes for Key { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - Key::Account(account_hash) => { - KEY_ID_SERIALIZED_LENGTH + account_hash.serialized_length() - } - Key::Hash(_) => KEY_HASH_SERIALIZED_LENGTH, - Key::URef(_) => KEY_UREF_SERIALIZED_LENGTH, - Key::Transfer(_) => KEY_TRANSFER_SERIALIZED_LENGTH, - Key::DeployInfo(_) => KEY_DEPLOY_INFO_SERIALIZED_LENGTH, - Key::EraInfo(_) => KEY_ERA_INFO_SERIALIZED_LENGTH, - Key::Balance(_) => KEY_BALANCE_SERIALIZED_LENGTH, - Key::Bid(_) => KEY_BID_SERIALIZED_LENGTH, - Key::Withdraw(_) => KEY_WITHDRAW_SERIALIZED_LENGTH, - Key::Dictionary(_) => KEY_DICTIONARY_SERIALIZED_LENGTH, - Key::SystemContractRegistry => KEY_SYSTEM_CONTRACT_REGISTRY_SERIALIZED_LENGTH, - Key::EraSummary => KEY_ERA_SUMMARY_SERIALIZED_LENGTH, - Key::Unbond(_) => KEY_UNBOND_SERIALIZED_LENGTH, - Key::ChainspecRegistry => KEY_CHAINSPEC_REGISTRY_SERIALIZED_LENGTH, - Key::ChecksumRegistry => KEY_CHECKSUM_REGISTRY_SERIALIZED_LENGTH, - Key::BidAddr(bid_addr) => match bid_addr.tag() { - BidAddrTag::Unified => KEY_ID_SERIALIZED_LENGTH + bid_addr.serialized_length() - 1, - BidAddrTag::Validator | BidAddrTag::Delegator => { - KEY_ID_SERIALIZED_LENGTH + bid_addr.serialized_length() - } - }, - Key::Package(_) => KEY_PACKAGE_SERIALIZED_LENGTH, - Key::AddressableEntity(..) => { - U8_SERIALIZED_LENGTH + KEY_ID_SERIALIZED_LENGTH + ADDR_LENGTH - } - Key::ByteCode(..) 
=> U8_SERIALIZED_LENGTH + KEY_ID_SERIALIZED_LENGTH + ADDR_LENGTH, - Key::Message(message_addr) => { - KEY_ID_SERIALIZED_LENGTH + message_addr.serialized_length() - } - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.push(self.tag()); - match self { - Key::Account(account_hash) => account_hash.write_bytes(writer), - Key::Hash(hash) => hash.write_bytes(writer), - Key::URef(uref) => uref.write_bytes(writer), - Key::Transfer(addr) => addr.write_bytes(writer), - Key::DeployInfo(deploy_hash) => deploy_hash.write_bytes(writer), - Key::EraInfo(era_id) => era_id.write_bytes(writer), - Key::Balance(uref_addr) => uref_addr.write_bytes(writer), - Key::Bid(account_hash) => account_hash.write_bytes(writer), - Key::Withdraw(account_hash) => account_hash.write_bytes(writer), - Key::Dictionary(addr) => addr.write_bytes(writer), - Key::Unbond(account_hash) => account_hash.write_bytes(writer), - Key::SystemContractRegistry - | Key::EraSummary - | Key::ChainspecRegistry - | Key::ChecksumRegistry => PADDING_BYTES.write_bytes(writer), - Key::BidAddr(bid_addr) => match bid_addr.tag() { - BidAddrTag::Unified => { - let bytes = bid_addr.to_bytes()?; - writer.extend(&bytes[1..]); - Ok(()) - } - BidAddrTag::Validator | BidAddrTag::Delegator => bid_addr.write_bytes(writer), - }, - Key::Package(package_addr) => package_addr.write_bytes(writer), - Key::AddressableEntity(package_kind_tag, entity_addr) => { - package_kind_tag.write_bytes(writer)?; - entity_addr.write_bytes(writer) - } - Key::ByteCode(byte_code_kind, byte_code_addr) => { - byte_code_kind.write_bytes(writer)?; - byte_code_addr.write_bytes(writer) - } - Key::Message(message_addr) => message_addr.write_bytes(writer), - } - } -} - -impl FromBytes for Key { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (tag, remainder) = KeyTag::from_bytes(bytes)?; - match tag { - KeyTag::Account => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Account(account_hash), rem)) - } - KeyTag::Hash => { - let (hash, rem) = HashAddr::from_bytes(remainder)?; - Ok((Key::Hash(hash), rem)) - } - KeyTag::URef => { - let (uref, rem) = URef::from_bytes(remainder)?; - Ok((Key::URef(uref), rem)) - } - KeyTag::Transfer => { - let (transfer_addr, rem) = TransferAddr::from_bytes(remainder)?; - Ok((Key::Transfer(transfer_addr), rem)) - } - KeyTag::DeployInfo => { - let (deploy_hash, rem) = DeployHash::from_bytes(remainder)?; - Ok((Key::DeployInfo(deploy_hash), rem)) - } - KeyTag::EraInfo => { - let (era_id, rem) = EraId::from_bytes(remainder)?; - Ok((Key::EraInfo(era_id), rem)) - } - KeyTag::Balance => { - let (uref_addr, rem) = URefAddr::from_bytes(remainder)?; - Ok((Key::Balance(uref_addr), rem)) - } - KeyTag::Bid => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Bid(account_hash), rem)) - } - KeyTag::Withdraw => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Withdraw(account_hash), rem)) - } - KeyTag::Dictionary => { - let (addr, rem) = DictionaryAddr::from_bytes(remainder)?; - Ok((Key::Dictionary(addr), rem)) - } - KeyTag::SystemContractRegistry => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::SystemContractRegistry, rem)) - } - KeyTag::EraSummary => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::EraSummary, rem)) - } - KeyTag::Unbond => { - let (account_hash, rem) = AccountHash::from_bytes(remainder)?; - Ok((Key::Unbond(account_hash), rem)) - } - KeyTag::ChainspecRegistry => { - let (_, rem) = <[u8; 
32]>::from_bytes(remainder)?; - Ok((Key::ChainspecRegistry, rem)) - } - KeyTag::ChecksumRegistry => { - let (_, rem) = <[u8; 32]>::from_bytes(remainder)?; - Ok((Key::ChecksumRegistry, rem)) - } - KeyTag::BidAddr => { - let (bid_addr, rem) = BidAddr::from_bytes(remainder)?; - Ok((Key::BidAddr(bid_addr), rem)) - } - KeyTag::Package => { - let (package_addr, rem) = PackageAddr::from_bytes(remainder)?; - Ok((Key::Package(package_addr), rem)) - } - KeyTag::AddressableEntity => { - let (package_kind_tag, rem) = PackageKindTag::from_bytes(remainder)?; - let (entity_addr, rem) = EntityAddr::from_bytes(rem)?; - Ok((Key::AddressableEntity(package_kind_tag, entity_addr), rem)) - } - KeyTag::ByteCode => { - let (byte_code_kind, rem) = ByteCodeKind::from_bytes(remainder)?; - let (byte_code_addr, rem) = ByteCodeAddr::from_bytes(rem)?; - Ok((Key::ByteCode(byte_code_kind, byte_code_addr), rem)) - } - KeyTag::Message => { - let (message_addr, rem) = MessageAddr::from_bytes(remainder)?; - Ok((Key::Message(message_addr), rem)) - } - } - } -} - -#[allow(dead_code)] -fn please_add_to_distribution_impl(key: Key) { - // If you've been forced to come here, you likely need to add your variant to the - // `Distribution` impl for `Key`. - match key { - Key::Account(_) => unimplemented!(), - Key::Hash(_) => unimplemented!(), - Key::URef(_) => unimplemented!(), - Key::Transfer(_) => unimplemented!(), - Key::DeployInfo(_) => unimplemented!(), - Key::EraInfo(_) => unimplemented!(), - Key::Balance(_) => unimplemented!(), - Key::Bid(_) => unimplemented!(), - Key::Withdraw(_) => unimplemented!(), - Key::Dictionary(_) => unimplemented!(), - Key::SystemContractRegistry => unimplemented!(), - Key::EraSummary => unimplemented!(), - Key::Unbond(_) => unimplemented!(), - Key::ChainspecRegistry => unimplemented!(), - Key::ChecksumRegistry => unimplemented!(), - Key::BidAddr(_) => unimplemented!(), - Key::Package(_) => unimplemented!(), - Key::AddressableEntity(..) => unimplemented!(), - Key::ByteCode(..) 
=> unimplemented!(),
-        Key::Message(_) => unimplemented!(),
-    }
-}
-
-#[cfg(any(feature = "testing", test))]
-impl Distribution<Key> for Standard {
-    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Key {
-        match rng.gen_range(0..=18) {
-            0 => Key::Account(rng.gen()),
-            1 => Key::Hash(rng.gen()),
-            2 => Key::URef(rng.gen()),
-            3 => Key::Transfer(rng.gen()),
-            4 => Key::DeployInfo(DeployHash::from_raw(rng.gen())),
-            5 => Key::EraInfo(EraId::new(rng.gen())),
-            6 => Key::Balance(rng.gen()),
-            7 => Key::Bid(rng.gen()),
-            8 => Key::Withdraw(rng.gen()),
-            9 => Key::Dictionary(rng.gen()),
-            10 => Key::SystemContractRegistry,
-            11 => Key::EraSummary,
-            12 => Key::Unbond(rng.gen()),
-            13 => Key::ChainspecRegistry,
-            14 => Key::ChecksumRegistry,
-            15 => Key::BidAddr(rng.gen()),
-            16 => Key::Package(rng.gen()),
-            17 => Key::AddressableEntity(rng.gen(), rng.gen()),
-            18 => Key::ByteCode(rng.gen(), rng.gen()),
-            19 => Key::Message(rng.gen()),
-            _ => unreachable!(),
-        }
-    }
-}
-
-mod serde_helpers {
-    use super::*;
-
-    #[derive(Serialize)]
-    pub(super) enum BinarySerHelper<'a> {
-        Account(&'a AccountHash),
-        Hash(&'a HashAddr),
-        URef(&'a URef),
-        Transfer(&'a TransferAddr),
-        #[serde(with = "crate::serde_helpers::deploy_hash_as_array")]
-        DeployInfo(&'a DeployHash),
-        EraInfo(&'a EraId),
-        Balance(&'a URefAddr),
-        Bid(&'a AccountHash),
-        Withdraw(&'a AccountHash),
-        Dictionary(&'a HashAddr),
-        SystemContractRegistry,
-        EraSummary,
-        Unbond(&'a AccountHash),
-        ChainspecRegistry,
-        ChecksumRegistry,
-        BidAddr(&'a BidAddr),
-        Package(&'a PackageAddr),
-        AddressableEntity(&'a PackageKindTag, &'a EntityAddr),
-        ByteCode(&'a ByteCodeKind, &'a ByteCodeAddr),
-        Message(&'a MessageAddr),
-    }
-
-    #[derive(Deserialize)]
-    pub(super) enum BinaryDeserHelper {
-        Account(AccountHash),
-        Hash(HashAddr),
-        URef(URef),
-        Transfer(TransferAddr),
-        #[serde(with = "crate::serde_helpers::deploy_hash_as_array")]
-        DeployInfo(DeployHash),
-        EraInfo(EraId),
-        Balance(URefAddr),
-        Bid(AccountHash),
-        Withdraw(AccountHash),
-        Dictionary(DictionaryAddr),
-        SystemContractRegistry,
-        EraSummary,
-        Unbond(AccountHash),
-        ChainspecRegistry,
-        ChecksumRegistry,
-        BidAddr(BidAddr),
-        Package(PackageAddr),
-        AddressableEntity(PackageKindTag, EntityAddr),
-        ByteCode(ByteCodeKind, ByteCodeAddr),
-        Message(MessageAddr),
-    }
-
-    impl<'a> From<&'a Key> for BinarySerHelper<'a> {
-        fn from(key: &'a Key) -> Self {
-            match key {
-                Key::Account(account_hash) => BinarySerHelper::Account(account_hash),
-                Key::Hash(hash_addr) => BinarySerHelper::Hash(hash_addr),
-                Key::URef(uref) => BinarySerHelper::URef(uref),
-                Key::Transfer(transfer_addr) => BinarySerHelper::Transfer(transfer_addr),
-                Key::DeployInfo(deploy_hash) => BinarySerHelper::DeployInfo(deploy_hash),
-                Key::EraInfo(era_id) => BinarySerHelper::EraInfo(era_id),
-                Key::Balance(uref_addr) => BinarySerHelper::Balance(uref_addr),
-                Key::Bid(account_hash) => BinarySerHelper::Bid(account_hash),
-                Key::Withdraw(account_hash) => BinarySerHelper::Withdraw(account_hash),
-                Key::Dictionary(addr) => BinarySerHelper::Dictionary(addr),
-                Key::SystemContractRegistry => BinarySerHelper::SystemContractRegistry,
-                Key::EraSummary => BinarySerHelper::EraSummary,
-                Key::Unbond(account_hash) => BinarySerHelper::Unbond(account_hash),
-                Key::ChainspecRegistry => BinarySerHelper::ChainspecRegistry,
-                Key::ChecksumRegistry => BinarySerHelper::ChecksumRegistry,
-                Key::BidAddr(bid_addr) => BinarySerHelper::BidAddr(bid_addr),
-                Key::Message(message_addr) => BinarySerHelper::Message(message_addr),
-                Key::Package(package_addr) => BinarySerHelper::Package(package_addr),
-                Key::AddressableEntity(package_kind, entity_addr) => {
-                    BinarySerHelper::AddressableEntity(package_kind, entity_addr)
-                }
-                Key::ByteCode(byte_code_kind, byte_code_addr) => {
-                    BinarySerHelper::ByteCode(byte_code_kind, byte_code_addr)
-                }
-            }
-        }
-    }
-
-    impl From<BinaryDeserHelper> for Key {
-        fn from(helper: BinaryDeserHelper) -> Self {
-            match helper {
-                BinaryDeserHelper::Account(account_hash) => Key::Account(account_hash),
-                BinaryDeserHelper::Hash(hash_addr) => Key::Hash(hash_addr),
-                BinaryDeserHelper::URef(uref) => Key::URef(uref),
-                BinaryDeserHelper::Transfer(transfer_addr) => Key::Transfer(transfer_addr),
-                BinaryDeserHelper::DeployInfo(deploy_hash) => Key::DeployInfo(deploy_hash),
-                BinaryDeserHelper::EraInfo(era_id) => Key::EraInfo(era_id),
-                BinaryDeserHelper::Balance(uref_addr) => Key::Balance(uref_addr),
-                BinaryDeserHelper::Bid(account_hash) => Key::Bid(account_hash),
-                BinaryDeserHelper::Withdraw(account_hash) => Key::Withdraw(account_hash),
-                BinaryDeserHelper::Dictionary(addr) => Key::Dictionary(addr),
-                BinaryDeserHelper::SystemContractRegistry => Key::SystemContractRegistry,
-                BinaryDeserHelper::EraSummary => Key::EraSummary,
-                BinaryDeserHelper::Unbond(account_hash) => Key::Unbond(account_hash),
-                BinaryDeserHelper::ChainspecRegistry => Key::ChainspecRegistry,
-                BinaryDeserHelper::ChecksumRegistry => Key::ChecksumRegistry,
-                BinaryDeserHelper::BidAddr(bid_addr) => Key::BidAddr(bid_addr),
-                BinaryDeserHelper::Message(message_addr) => Key::Message(message_addr),
-                BinaryDeserHelper::Package(package_addr) => Key::Package(package_addr),
-                BinaryDeserHelper::AddressableEntity(package_kind, entity_addr) => {
-                    Key::AddressableEntity(package_kind, entity_addr)
-                }
-                BinaryDeserHelper::ByteCode(byte_kind, byte_code_addr) => {
-                    Key::ByteCode(byte_kind, byte_code_addr)
-                }
-            }
-        }
-    }
-}
-
-impl Serialize for Key {
-    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
-        if serializer.is_human_readable() {
-            self.to_formatted_string().serialize(serializer)
-        } else {
-            serde_helpers::BinarySerHelper::from(self).serialize(serializer)
-        }
-    }
-}
-
-impl<'de> Deserialize<'de> for Key {
-    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
-        if deserializer.is_human_readable() {
-            let formatted_key = String::deserialize(deserializer)?;
-            Key::from_formatted_str(&formatted_key).map_err(SerdeError::custom)
-        } else {
-            let binary_helper = serde_helpers::BinaryDeserHelper::deserialize(deserializer)?;
-            Ok(Key::from(binary_helper))
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use std::string::ToString;
-
-    use super::*;
-    use crate::{
-        account::ACCOUNT_HASH_FORMATTED_STRING_PREFIX,
-        bytesrepr::{Error, FromBytes},
-        transfer::TRANSFER_ADDR_FORMATTED_STRING_PREFIX,
-        uref::UREF_FORMATTED_STRING_PREFIX,
-        AccessRights, URef,
-    };
-
-    const ACCOUNT_KEY: Key = Key::Account(AccountHash::new([42; 32]));
-    const HASH_KEY: Key = Key::Hash([42; 32]);
-    const UREF_KEY: Key = Key::URef(URef::new([42; 32], AccessRights::READ));
-    const TRANSFER_KEY: Key = Key::Transfer(TransferAddr::new([42; 32]));
-    const DEPLOY_INFO_KEY: Key = Key::DeployInfo(DeployHash::from_raw([42; 32]));
-    const ERA_INFO_KEY: Key = Key::EraInfo(EraId::new(42));
-    const BALANCE_KEY: Key = Key::Balance([42; 32]);
-    const BID_KEY: Key = Key::Bid(AccountHash::new([42; 32]));
-    const UNIFIED_BID_KEY: Key = Key::BidAddr(BidAddr::legacy([42; 32]));
-    const VALIDATOR_BID_KEY: Key = Key::BidAddr(BidAddr::new_validator_addr([2; 32]));
-    const DELEGATOR_BID_KEY: Key = Key::BidAddr(BidAddr::new_delegator_addr(([2; 32], [9;
32]))); - const WITHDRAW_KEY: Key = Key::Withdraw(AccountHash::new([42; 32])); - const DICTIONARY_KEY: Key = Key::Dictionary([42; 32]); - const SYSTEM_CONTRACT_REGISTRY_KEY: Key = Key::SystemContractRegistry; - const ERA_SUMMARY_KEY: Key = Key::EraSummary; - const UNBOND_KEY: Key = Key::Unbond(AccountHash::new([42; 32])); - const CHAINSPEC_REGISTRY_KEY: Key = Key::ChainspecRegistry; - const CHECKSUM_REGISTRY_KEY: Key = Key::ChecksumRegistry; - const PACKAGE_KEY: Key = Key::Package([42; 32]); - const ADDRESSABLE_ENTITY_SYSTEM_KEY: Key = - Key::AddressableEntity(PackageKindTag::System, [42; 32]); - const ADDRESSABLE_ENTITY_ACCOUNT_KEY: Key = - Key::AddressableEntity(PackageKindTag::Account, [42; 32]); - const ADDRESSABLE_ENTITY_SMART_CONTRACT_KEY: Key = - Key::AddressableEntity(PackageKindTag::SmartContract, [42; 32]); - const BYTE_CODE_EMPTY_KEY: Key = Key::ByteCode(ByteCodeKind::Empty, [42; 32]); - const BYTE_CODE_V1_WASM_KEY: Key = Key::ByteCode(ByteCodeKind::V1CasperWasm, [42; 32]); - const MESSAGE_TOPIC_KEY: Key = Key::Message(MessageAddr::new_topic_addr( - AddressableEntityHash::new([42u8; 32]), - TopicNameHash::new([42; 32]), - )); - const MESSAGE_KEY: Key = Key::Message(MessageAddr::new_message_addr( - AddressableEntityHash::new([42u8; 32]), - TopicNameHash::new([2; 32]), - 15, - )); - const KEYS: &[Key] = &[ - ACCOUNT_KEY, - HASH_KEY, - UREF_KEY, - TRANSFER_KEY, - DEPLOY_INFO_KEY, - ERA_INFO_KEY, - BALANCE_KEY, - BID_KEY, - WITHDRAW_KEY, - DICTIONARY_KEY, - SYSTEM_CONTRACT_REGISTRY_KEY, - ERA_SUMMARY_KEY, - UNBOND_KEY, - CHAINSPEC_REGISTRY_KEY, - CHECKSUM_REGISTRY_KEY, - UNIFIED_BID_KEY, - VALIDATOR_BID_KEY, - DELEGATOR_BID_KEY, - PACKAGE_KEY, - ADDRESSABLE_ENTITY_SYSTEM_KEY, - ADDRESSABLE_ENTITY_ACCOUNT_KEY, - ADDRESSABLE_ENTITY_SMART_CONTRACT_KEY, - BYTE_CODE_EMPTY_KEY, - BYTE_CODE_V1_WASM_KEY, - MESSAGE_TOPIC_KEY, - MESSAGE_KEY, - ]; - const HEX_STRING: &str = "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a"; - const TOPIC_NAME_HEX_STRING: &str = - "0202020202020202020202020202020202020202020202020202020202020202"; - const MESSAGE_INDEX_HEX_STRING: &str = "f"; - const UNIFIED_HEX_STRING: &str = - "002a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a"; - const VALIDATOR_HEX_STRING: &str = - "010202020202020202020202020202020202020202020202020202020202020202"; - const DELEGATOR_HEX_STRING: &str = - "0202020202020202020202020202020202020202020202020202020202020202020909090909090909090909090909090909090909090909090909090909090909"; - - fn test_readable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_readable(), is_true) - } - - #[test] - fn test_is_readable() { - test_readable(AccessRights::READ, true); - test_readable(AccessRights::READ_ADD, true); - test_readable(AccessRights::READ_WRITE, true); - test_readable(AccessRights::READ_ADD_WRITE, true); - test_readable(AccessRights::ADD, false); - test_readable(AccessRights::ADD_WRITE, false); - test_readable(AccessRights::WRITE, false); - } - - fn test_writable(right: AccessRights, is_true: bool) { - assert_eq!(right.is_writeable(), is_true) - } - - #[test] - fn test_is_writable() { - test_writable(AccessRights::WRITE, true); - test_writable(AccessRights::READ_WRITE, true); - test_writable(AccessRights::ADD_WRITE, true); - test_writable(AccessRights::READ, false); - test_writable(AccessRights::ADD, false); - test_writable(AccessRights::READ_ADD, false); - test_writable(AccessRights::READ_ADD_WRITE, true); - } - - fn test_addable(right: AccessRights, is_true: bool) { - 
assert_eq!(right.is_addable(), is_true) - } - - #[test] - fn test_is_addable() { - test_addable(AccessRights::ADD, true); - test_addable(AccessRights::READ_ADD, true); - test_addable(AccessRights::READ_WRITE, false); - test_addable(AccessRights::ADD_WRITE, true); - test_addable(AccessRights::READ, false); - test_addable(AccessRights::WRITE, false); - test_addable(AccessRights::READ_ADD_WRITE, true); - } - - #[test] - fn should_display_key() { - assert_eq!( - format!("{}", ACCOUNT_KEY), - format!("Key::Account({})", HEX_STRING) - ); - assert_eq!( - format!("{}", HASH_KEY), - format!("Key::Hash({})", HEX_STRING) - ); - assert_eq!( - format!("{}", UREF_KEY), - format!("Key::URef({}, READ)", HEX_STRING) - ); - assert_eq!( - format!("{}", TRANSFER_KEY), - format!("Key::Transfer({})", HEX_STRING) - ); - assert_eq!( - format!("{}", DEPLOY_INFO_KEY), - format!("Key::DeployInfo({})", HEX_STRING) - ); - assert_eq!( - format!("{}", ERA_INFO_KEY), - "Key::EraInfo(era 42)".to_string() - ); - assert_eq!( - format!("{}", BALANCE_KEY), - format!("Key::Balance({})", HEX_STRING) - ); - assert_eq!(format!("{}", BID_KEY), format!("Key::Bid({})", HEX_STRING)); - assert_eq!( - format!("{}", UNIFIED_BID_KEY), - format!("Key::BidAddr({})", UNIFIED_HEX_STRING) - ); - assert_eq!( - format!("{}", VALIDATOR_BID_KEY), - format!("Key::BidAddr({})", VALIDATOR_HEX_STRING) - ); - assert_eq!( - format!("{}", DELEGATOR_BID_KEY), - format!("Key::BidAddr({})", DELEGATOR_HEX_STRING) - ); - assert_eq!( - format!("{}", WITHDRAW_KEY), - format!("Key::Withdraw({})", HEX_STRING) - ); - assert_eq!( - format!("{}", DICTIONARY_KEY), - format!("Key::Dictionary({})", HEX_STRING) - ); - assert_eq!( - format!("{}", SYSTEM_CONTRACT_REGISTRY_KEY), - format!( - "Key::SystemContractRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - ); - assert_eq!( - format!("{}", ERA_SUMMARY_KEY), - format!("Key::EraSummary({})", base16::encode_lower(&PADDING_BYTES)) - ); - assert_eq!( - format!("{}", UNBOND_KEY), - format!("Key::Unbond({})", HEX_STRING) - ); - assert_eq!( - format!("{}", CHAINSPEC_REGISTRY_KEY), - format!( - "Key::ChainspecRegistry({})", - base16::encode_lower(&PADDING_BYTES) - ) - ); - assert_eq!( - format!("{}", CHECKSUM_REGISTRY_KEY), - format!( - "Key::ChecksumRegistry({})", - base16::encode_lower(&PADDING_BYTES), - ) - ); - assert_eq!( - format!("{}", PACKAGE_KEY), - format!("Key::Package({})", HEX_STRING) - ); - assert_eq!( - format!("{}", ADDRESSABLE_ENTITY_SYSTEM_KEY), - format!("Key::AddressableEntity(system-{})", HEX_STRING) - ); - assert_eq!( - format!("{}", ADDRESSABLE_ENTITY_ACCOUNT_KEY), - format!("Key::AddressableEntity(account-{})", HEX_STRING) - ); - assert_eq!( - format!("{}", ADDRESSABLE_ENTITY_SMART_CONTRACT_KEY), - format!("Key::AddressableEntity(smart-contract-{})", HEX_STRING) - ); - assert_eq!( - format!("{}", BYTE_CODE_EMPTY_KEY), - format!("Key::ByteCode(empty-{})", HEX_STRING) - ); - assert_eq!( - format!("{}", BYTE_CODE_V1_WASM_KEY), - format!("Key::ByteCode(v1-casper-wasm-{})", HEX_STRING) - ); - assert_eq!( - format!("{}", MESSAGE_TOPIC_KEY), - format!("Key::Message({}-{})", HEX_STRING, HEX_STRING) - ); - - assert_eq!( - format!("{}", MESSAGE_KEY), - format!( - "Key::Message({}-{}-{})", - HEX_STRING, TOPIC_NAME_HEX_STRING, MESSAGE_INDEX_HEX_STRING - ) - ) - } - - #[test] - fn abuse_vec_key() { - // Prefix is 2^32-1 = shouldn't allocate that much - let bytes: Vec = vec![255, 255, 255, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - let res: Result<(Vec, &[u8]), _> = FromBytes::from_bytes(&bytes); - 
#[cfg(target_os = "linux")] - assert_eq!(res.expect_err("should fail"), Error::OutOfMemory); - #[cfg(target_os = "macos")] - assert_eq!(res.expect_err("should fail"), Error::EarlyEndOfStream); - } - - #[test] - fn check_key_account_getters() { - let account = [42; 32]; - let account_hash = AccountHash::new(account); - let key1 = Key::Account(account_hash); - assert_eq!(key1.into_account(), Some(account_hash)); - assert!(key1.into_entity_addr().is_none()); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_key_hash_getters() { - let hash = [42; KEY_HASH_LENGTH]; - let key1 = Key::Hash(hash); - assert!(key1.into_account().is_none()); - assert_eq!(key1.into_hash_addr(), Some(hash)); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_entity_key_getters() { - let hash = [42; KEY_HASH_LENGTH]; - let key1 = Key::contract_entity_key(AddressableEntityHash::new(hash)); - assert!(key1.into_account().is_none()); - assert_eq!(key1.into_entity_addr(), Some(hash)); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_package_key_getters() { - let hash = [42; KEY_HASH_LENGTH]; - let key1 = Key::Package(hash); - assert!(key1.into_account().is_none()); - assert_eq!(key1.into_package_addr(), Some(hash)); - assert!(key1.as_uref().is_none()); - } - - #[test] - fn check_key_uref_getters() { - let uref = URef::new([42; 32], AccessRights::READ_ADD_WRITE); - let key1 = Key::URef(uref); - assert!(key1.into_account().is_none()); - assert!(key1.into_entity_addr().is_none()); - assert_eq!(key1.as_uref(), Some(&uref)); - } - - #[test] - fn key_max_serialized_length() { - let mut got_max = false; - for key in KEYS { - let expected = Key::max_serialized_length(); - let actual = key.serialized_length(); - assert!( - actual <= expected, - "key too long {} expected {} actual {}", - key, - expected, - actual - ); - if actual == Key::max_serialized_length() { - got_max = true; - } - } - assert!( - got_max, - "None of the Key variants has a serialized_length equal to \ - Key::max_serialized_length(), so Key::max_serialized_length() should be reduced" - ); - } - - #[test] - fn should_parse_legacy_bid_key_from_string() { - let account_hash = AccountHash([1; 32]); - let legacy_bid_key = Key::Bid(account_hash); - let original_string = legacy_bid_key.to_formatted_string(); - - let parsed_bid_key = - Key::from_formatted_str(&original_string).expect("{string} (key = {key:?})"); - if let Key::Bid(parsed_account_hash) = parsed_bid_key { - assert_eq!(parsed_account_hash, account_hash,); - assert_eq!(legacy_bid_key, parsed_bid_key); - - let translated_string = parsed_bid_key.to_formatted_string(); - assert_eq!(original_string, translated_string); - } else { - panic!("should have account hash"); - } - } - - #[test] - fn should_parse_legacy_unified_bid_key_from_string() { - let legacy_bid_addr = BidAddr::legacy([1; 32]); - let legacy_bid_key = Key::BidAddr(legacy_bid_addr); - assert_eq!(legacy_bid_addr.tag(), BidAddrTag::Unified,); - - let original_string = legacy_bid_key.to_formatted_string(); - let parsed_key = - Key::from_formatted_str(&original_string).expect("{string} (key = {key:?})"); - let parsed_bid_addr = parsed_key.as_bid_addr().expect("must have bid addr"); - assert!(parsed_key.is_bid_addr_key()); - assert_eq!(parsed_bid_addr.tag(), legacy_bid_addr.tag(),); - assert_eq!(*parsed_bid_addr, legacy_bid_addr); - - let translated_string = parsed_key.to_formatted_string(); - assert_eq!(original_string, translated_string); - assert_eq!(parsed_key.as_bid_addr(), legacy_bid_key.as_bid_addr(),); - } - 
- #[test] - fn should_parse_validator_bid_key_from_string() { - let validator_bid_addr = BidAddr::new_validator_addr([1; 32]); - let validator_bid_key = Key::BidAddr(validator_bid_addr); - assert_eq!(validator_bid_addr.tag(), BidAddrTag::Validator,); - - let original_string = validator_bid_key.to_formatted_string(); - let parsed_key = - Key::from_formatted_str(&original_string).expect("{string} (key = {key:?})"); - let parsed_bid_addr = parsed_key.as_bid_addr().expect("must have bid addr"); - assert!(parsed_key.is_bid_addr_key()); - assert_eq!(parsed_bid_addr.tag(), validator_bid_addr.tag(),); - assert_eq!(*parsed_bid_addr, validator_bid_addr,); - - let translated_string = parsed_key.to_formatted_string(); - assert_eq!(original_string, translated_string); - assert_eq!(parsed_key.as_bid_addr(), validator_bid_key.as_bid_addr(),); - } - - #[test] - fn should_parse_delegator_bid_key_from_string() { - let delegator_bid_addr = BidAddr::new_delegator_addr(([1; 32], [9; 32])); - let delegator_bid_key = Key::BidAddr(delegator_bid_addr); - assert_eq!(delegator_bid_addr.tag(), BidAddrTag::Delegator,); - - let original_string = delegator_bid_key.to_formatted_string(); - - let parsed_key = - Key::from_formatted_str(&original_string).expect("{string} (key = {key:?})"); - let parsed_bid_addr = parsed_key.as_bid_addr().expect("must have bid addr"); - assert!(parsed_key.is_bid_addr_key()); - assert_eq!(parsed_bid_addr.tag(), delegator_bid_addr.tag(),); - assert_eq!(*parsed_bid_addr, delegator_bid_addr,); - - let translated_string = parsed_key.to_formatted_string(); - assert_eq!(original_string, translated_string); - assert_eq!(parsed_key.as_bid_addr(), delegator_bid_key.as_bid_addr(),); - } - - #[test] - fn should_parse_key_from_str() { - for key in KEYS { - let string = key.to_formatted_string(); - let parsed_key = Key::from_formatted_str(&string).expect("{string} (key = {key:?})"); - assert_eq!(parsed_key, *key, "{string} (key = {key:?})"); - } - } - - #[test] - fn should_fail_to_parse_key_from_str() { - assert!( - Key::from_formatted_str(ACCOUNT_HASH_FORMATTED_STRING_PREFIX) - .unwrap_err() - .to_string() - .starts_with("account-key from string error: ") - ); - assert!(Key::from_formatted_str(HASH_PREFIX) - .unwrap_err() - .to_string() - .starts_with("hash-key from string error: ")); - assert!(Key::from_formatted_str(UREF_FORMATTED_STRING_PREFIX) - .unwrap_err() - .to_string() - .starts_with("uref-key from string error: ")); - assert!( - Key::from_formatted_str(TRANSFER_ADDR_FORMATTED_STRING_PREFIX) - .unwrap_err() - .to_string() - .starts_with("transfer-key from string error: ") - ); - assert!(Key::from_formatted_str(DEPLOY_INFO_PREFIX) - .unwrap_err() - .to_string() - .starts_with("deploy-info-key from string error: ")); - assert!(Key::from_formatted_str(ERA_INFO_PREFIX) - .unwrap_err() - .to_string() - .starts_with("era-info-key from string error: ")); - assert!(Key::from_formatted_str(BALANCE_PREFIX) - .unwrap_err() - .to_string() - .starts_with("balance-key from string error: ")); - assert!(Key::from_formatted_str(BID_PREFIX) - .unwrap_err() - .to_string() - .starts_with("bid-key from string error: ")); - assert!(Key::from_formatted_str(WITHDRAW_PREFIX) - .unwrap_err() - .to_string() - .starts_with("withdraw-key from string error: ")); - assert!(Key::from_formatted_str(DICTIONARY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("dictionary-key from string error: ")); - assert!(Key::from_formatted_str(SYSTEM_CONTRACT_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - 
.starts_with("system-contract-registry-key from string error: ")); - assert!(Key::from_formatted_str(ERA_SUMMARY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("era-summary-key from string error")); - assert!(Key::from_formatted_str(UNBOND_PREFIX) - .unwrap_err() - .to_string() - .starts_with("unbond-key from string error: ")); - assert!(Key::from_formatted_str(CHAINSPEC_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("chainspec-registry-key from string error: ")); - assert!(Key::from_formatted_str(CHECKSUM_REGISTRY_PREFIX) - .unwrap_err() - .to_string() - .starts_with("checksum-registry-key from string error: ")); - let bid_addr_err = Key::from_formatted_str(BID_ADDR_PREFIX) - .unwrap_err() - .to_string(); - assert!( - bid_addr_err.starts_with("bid-addr-key from string error: "), - "{}", - bid_addr_err - ); - assert!(Key::from_formatted_str(PACKAGE_PREFIX) - .unwrap_err() - .to_string() - .starts_with("package-key from string error: ")); - assert!( - Key::from_formatted_str(&format!("{}{}", ENTITY_PREFIX, ACCOUNT_ENTITY_PREFIX)) - .unwrap_err() - .to_string() - .starts_with("addressable-entity-key from string error: ") - ); - assert!( - Key::from_formatted_str(&format!("{}{}", BYTE_CODE_PREFIX, EMPTY_PREFIX)) - .unwrap_err() - .to_string() - .starts_with("byte-code-key from string error: ") - ); - let invalid_prefix = "a-0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(invalid_prefix) - .unwrap_err() - .to_string(), - "unknown prefix for key" - ); - - let missing_hyphen_prefix = - "hash0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(missing_hyphen_prefix) - .unwrap_err() - .to_string(), - "unknown prefix for key" - ); - - let no_prefix = "0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!( - Key::from_formatted_str(no_prefix).unwrap_err().to_string(), - "unknown prefix for key" - ); - } - - #[test] - fn key_to_json() { - for key in KEYS.iter() { - assert_eq!( - serde_json::to_string(key).unwrap(), - format!("\"{}\"", key.to_formatted_string()) - ); - } - } - - #[test] - fn serialization_roundtrip_bincode() { - for key in KEYS { - let encoded = bincode::serialize(key).unwrap(); - let decoded = bincode::deserialize(&encoded).unwrap(); - assert_eq!(key, &decoded); - } - } - - #[test] - fn key_tag_bytes_roundtrip() { - for key in KEYS { - let tag: KeyTag = key.tag(); - bytesrepr::test_serialization_roundtrip(&tag); - } - } - - #[test] - fn serialization_roundtrip_json() { - let round_trip = |key: &Key| { - let encoded = serde_json::to_value(key).unwrap(); - let decoded = serde_json::from_value(encoded.clone()) - .unwrap_or_else(|_| panic!("{} {}", key, encoded)); - assert_eq!(key, &decoded); - }; - - for key in KEYS { - round_trip(key); - } - - let zeros = [0; BLAKE2B_DIGEST_LENGTH]; - let nines = [9; BLAKE2B_DIGEST_LENGTH]; - - round_trip(&Key::Account(AccountHash::new(zeros))); - round_trip(&Key::Hash(zeros)); - round_trip(&Key::URef(URef::new(zeros, AccessRights::READ))); - round_trip(&Key::Transfer(TransferAddr::new(zeros))); - round_trip(&Key::DeployInfo(DeployHash::from_raw(zeros))); - round_trip(&Key::EraInfo(EraId::from(0))); - round_trip(&Key::Balance(URef::new(zeros, AccessRights::READ).addr())); - round_trip(&Key::Bid(AccountHash::new(zeros))); - round_trip(&Key::BidAddr(BidAddr::legacy(zeros))); - round_trip(&Key::BidAddr(BidAddr::new_validator_addr(zeros))); - 
round_trip(&Key::BidAddr(BidAddr::new_delegator_addr((zeros, nines)))); - round_trip(&Key::Withdraw(AccountHash::new(zeros))); - round_trip(&Key::Dictionary(zeros)); - round_trip(&Key::Unbond(AccountHash::new(zeros))); - round_trip(&Key::Package(zeros)); - round_trip(&Key::AddressableEntity(PackageKindTag::System, zeros)); - round_trip(&Key::AddressableEntity(PackageKindTag::Account, zeros)); - round_trip(&Key::AddressableEntity( - PackageKindTag::SmartContract, - zeros, - )); - round_trip(&Key::ByteCode(ByteCodeKind::Empty, zeros)); - round_trip(&Key::ByteCode(ByteCodeKind::V1CasperWasm, zeros)); - round_trip(&Key::Message(MessageAddr::new_topic_addr( - zeros.into(), - nines.into(), - ))); - round_trip(&Key::Message(MessageAddr::new_message_addr( - zeros.into(), - nines.into(), - 1, - ))); - } -} diff --git a/casper_types_ver_2_0/src/lib.rs b/casper_types_ver_2_0/src/lib.rs deleted file mode 100644 index 20427aa3..00000000 --- a/casper_types_ver_2_0/src/lib.rs +++ /dev/null @@ -1,215 +0,0 @@ -//! Types used to allow creation of Wasm contracts and tests for use on the Casper Platform. - -#![cfg_attr( - not(any( - feature = "json-schema", - feature = "datasize", - feature = "std", - feature = "testing", - test, - )), - no_std -)] -#![doc(html_root_url = "https://docs.rs/casper-types/3.0.0")] -#![doc( - html_favicon_url = "https://raw.githubusercontent.com/casper-network/casper-node/blob/dev/images/Casper_Logo_Favicon_48.png", - html_logo_url = "https://raw.githubusercontent.com/casper-network/casper-node/blob/dev/images/Casper_Logo_Favicon.png" -)] -#![warn(missing_docs)] -#![cfg_attr(docsrs, feature(doc_auto_cfg))] - -#[cfg_attr(not(test), macro_use)] -extern crate alloc; -extern crate core; - -mod access_rights; -pub mod account; -pub mod addressable_entity; -pub mod api_error; -mod auction_state; -pub mod binary_port; -mod block; -mod block_time; -mod byte_code; -pub mod bytesrepr; -#[cfg(any(feature = "std", test))] -mod chainspec; -pub mod checksummed_hex; -mod cl_type; -mod cl_value; -pub mod contract_messages; -mod contract_wasm; -pub mod contracts; -pub mod crypto; -mod deploy_info; -mod digest; -mod display_iter; -mod era_id; -pub mod execution; -#[cfg(any(feature = "std", test))] -pub mod file_utils; -mod gas; -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens; -mod json_pretty_printer; -mod key; -mod motes; -pub mod package; -mod peers_map; -mod phase; -mod protocol_version; -mod reactor_state; -mod semver; -pub(crate) mod serde_helpers; -mod stored_value; -pub mod system; -mod tagged; -#[cfg(any(feature = "testing", test))] -pub mod testing; -mod timestamp; -mod transaction; -mod transfer; -mod transfer_result; -mod uint; -mod uref; -mod validator_change; - -#[cfg(feature = "std")] -use libc::{c_long, sysconf, _SC_PAGESIZE}; -#[cfg(feature = "std")] -use once_cell::sync::Lazy; - -pub use crate::uint::{UIntParseError, U128, U256, U512}; - -pub use access_rights::{ - AccessRights, ContextAccessRights, GrantedAccess, ACCESS_RIGHTS_SERIALIZED_LENGTH, -}; -#[doc(inline)] -pub use addressable_entity::{ - AddressableEntity, AddressableEntityHash, EntryPoint, EntryPointAccess, EntryPointType, - EntryPoints, Parameter, -}; -#[doc(inline)] -pub use api_error::ApiError; -pub use auction_state::{AuctionState, JsonEraValidators, JsonValidatorWeights}; -#[cfg(all(feature = "std", feature = "json-schema"))] -pub use block::JsonBlockWithSignatures; -pub use block::{ - AvailableBlockRange, Block, BlockBody, BlockBodyV1, BlockBodyV2, BlockHash, BlockHashAndHeight, - 
BlockHeader, BlockHeaderV1, BlockHeaderV2, BlockIdentifier, BlockSignatures, - BlockSignaturesMergeError, BlockSyncStatus, BlockSynchronizerStatus, BlockV1, BlockV2, - BlockValidationError, EraEnd, EraEndV1, EraEndV2, EraReport, FinalitySignature, - FinalitySignatureId, RewardedSignatures, Rewards, SignedBlock, SignedBlockHeader, - SignedBlockHeaderValidationError, SingleBlockRewardedSignatures, -}; -#[cfg(any(feature = "testing", test))] -pub use block::{TestBlockBuilder, TestBlockV1Builder}; -pub use block_time::{BlockTime, BLOCKTIME_SERIALIZED_LENGTH}; -pub use byte_code::{ByteCode, ByteCodeHash, ByteCodeKind}; -#[cfg(any(feature = "std", test))] -pub use chainspec::{ - AccountConfig, AccountsConfig, ActivationPoint, AdministratorAccount, AuctionCosts, - BrTableCost, Chainspec, ChainspecRawBytes, ChainspecRegistry, ConsensusProtocolName, - ControlFlowCosts, CoreConfig, DelegatorConfig, DeployConfig, FeeHandling, GenesisAccount, - GenesisValidator, GlobalStateUpdate, GlobalStateUpdateConfig, GlobalStateUpdateError, - HandlePaymentCosts, HighwayConfig, HostFunction, HostFunctionCost, HostFunctionCosts, - LegacyRequiredFinality, MessageLimits, MintCosts, NetworkConfig, NextUpgrade, OpcodeCosts, - ProtocolConfig, RefundHandling, StandardPaymentCosts, StorageCosts, SystemConfig, - TransactionConfig, TransactionV1Config, UpgradeConfig, ValidatorConfig, WasmConfig, - DEFAULT_HOST_FUNCTION_NEW_DICTIONARY, -}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -pub use chainspec::{ - DEFAULT_ADD_BID_COST, DEFAULT_ADD_COST, DEFAULT_BIT_COST, DEFAULT_CONST_COST, - DEFAULT_CONTROL_FLOW_BLOCK_OPCODE, DEFAULT_CONTROL_FLOW_BR_IF_OPCODE, - DEFAULT_CONTROL_FLOW_BR_OPCODE, DEFAULT_CONTROL_FLOW_BR_TABLE_MULTIPLIER, - DEFAULT_CONTROL_FLOW_BR_TABLE_OPCODE, DEFAULT_CONTROL_FLOW_CALL_INDIRECT_OPCODE, - DEFAULT_CONTROL_FLOW_CALL_OPCODE, DEFAULT_CONTROL_FLOW_DROP_OPCODE, - DEFAULT_CONTROL_FLOW_ELSE_OPCODE, DEFAULT_CONTROL_FLOW_END_OPCODE, - DEFAULT_CONTROL_FLOW_IF_OPCODE, DEFAULT_CONTROL_FLOW_LOOP_OPCODE, - DEFAULT_CONTROL_FLOW_RETURN_OPCODE, DEFAULT_CONTROL_FLOW_SELECT_OPCODE, - DEFAULT_CONVERSION_COST, DEFAULT_CURRENT_MEMORY_COST, DEFAULT_DELEGATE_COST, DEFAULT_DIV_COST, - DEFAULT_GLOBAL_COST, DEFAULT_GROW_MEMORY_COST, DEFAULT_INTEGER_COMPARISON_COST, - DEFAULT_LOAD_COST, DEFAULT_LOCAL_COST, DEFAULT_MAX_PAYMENT_MOTES, DEFAULT_MAX_STACK_HEIGHT, - DEFAULT_MIN_TRANSFER_MOTES, DEFAULT_MUL_COST, DEFAULT_NEW_DICTIONARY_COST, DEFAULT_NOP_COST, - DEFAULT_STORE_COST, DEFAULT_TRANSFER_COST, DEFAULT_UNREACHABLE_COST, - DEFAULT_WASMLESS_TRANSFER_COST, DEFAULT_WASM_MAX_MEMORY, -}; -pub use cl_type::{named_key_type, CLType, CLTyped}; -pub use cl_value::{CLTypeMismatch, CLValue, CLValueError}; -pub use contract_wasm::ContractWasm; -#[doc(inline)] -pub use contracts::Contract; -pub use crypto::*; -pub use deploy_info::DeployInfo; -pub use digest::{ - ChunkWithProof, ChunkWithProofVerificationError, Digest, DigestError, IndexedMerkleProof, - MerkleConstructionError, MerkleVerificationError, -}; -pub use display_iter::DisplayIter; -pub use era_id::EraId; -pub use gas::Gas; -pub use json_pretty_printer::json_pretty_print; -#[doc(inline)] -pub use key::{ - ByteCodeAddr, DictionaryAddr, EntityAddr, FromStrError as KeyFromStrError, HashAddr, Key, - KeyTag, PackageAddr, BLAKE2B_DIGEST_LENGTH, DICTIONARY_ITEM_KEY_MAX_LENGTH, - KEY_DICTIONARY_LENGTH, KEY_HASH_LENGTH, -}; -pub use motes::Motes; -#[doc(inline)] -pub use package::{ - EntityVersion, EntityVersionKey, EntityVersions, Group, Groups, Package, PackageHash, -}; 
-pub use peers_map::{PeerEntry, Peers}; -pub use phase::{Phase, PHASE_SERIALIZED_LENGTH}; -pub use protocol_version::{ProtocolVersion, VersionCheckResult}; -pub use reactor_state::ReactorState; -pub use semver::{ParseSemVerError, SemVer, SEM_VER_SERIALIZED_LENGTH}; -pub use stored_value::{ - GlobalStateIdentifier, StoredValue, TypeMismatch as StoredValueTypeMismatch, -}; -pub use tagged::Tagged; -#[cfg(any(feature = "std", test))] -pub use timestamp::serde_option_time_diff; -pub use timestamp::{TimeDiff, Timestamp}; -pub use transaction::{ - AddressableEntityIdentifier, Deploy, DeployApproval, DeployApprovalsHash, DeployConfigFailure, - DeployDecodeFromJsonError, DeployError, DeployExcessiveSizeError, DeployFootprint, DeployHash, - DeployHeader, DeployId, ExecutableDeployItem, ExecutableDeployItemIdentifier, ExecutionInfo, - FinalizedApprovals, FinalizedDeployApprovals, FinalizedTransactionV1Approvals, InitiatorAddr, - NamedArg, PackageIdentifier, PricingMode, RuntimeArgs, Transaction, TransactionApprovalsHash, - TransactionEntryPoint, TransactionHash, TransactionHeader, TransactionId, - TransactionInvocationTarget, TransactionRuntime, TransactionScheduling, TransactionSessionKind, - TransactionTarget, TransactionV1, TransactionV1Approval, TransactionV1ApprovalsHash, - TransactionV1Body, TransactionV1ConfigFailure, TransactionV1DecodeFromJsonError, - TransactionV1Error, TransactionV1ExcessiveSizeError, TransactionV1Hash, TransactionV1Header, - TransferTarget, -}; -#[cfg(any(feature = "std", test))] -pub use transaction::{ - DeployBuilder, DeployBuilderError, TransactionV1Builder, TransactionV1BuilderError, -}; -pub use transfer::{ - FromStrError as TransferFromStrError, Transfer, TransferAddr, TRANSFER_ADDR_LENGTH, -}; -pub use transfer_result::{TransferResult, TransferredTo}; -pub use uref::{ - FromStrError as URefFromStrError, URef, URefAddr, UREF_ADDR_LENGTH, UREF_SERIALIZED_LENGTH, -}; -pub use validator_change::ValidatorChange; - -/// OS page size. -#[cfg(feature = "std")] -pub static OS_PAGE_SIZE: Lazy = Lazy::new(|| { - /// Sensible default for many if not all systems. - const DEFAULT_PAGE_SIZE: usize = 4096; - - // https://www.gnu.org/software/libc/manual/html_node/Sysconf.html - let value: c_long = unsafe { sysconf(_SC_PAGESIZE) }; - if value <= 0 { - DEFAULT_PAGE_SIZE - } else { - value as usize - } -}); diff --git a/casper_types_ver_2_0/src/motes.rs b/casper_types_ver_2_0/src/motes.rs deleted file mode 100644 index 8008a81c..00000000 --- a/casper_types_ver_2_0/src/motes.rs +++ /dev/null @@ -1,248 +0,0 @@ -//! The `motes` module is used for working with Motes. - -use alloc::vec::Vec; -use core::{ - fmt, - iter::Sum, - ops::{Add, Div, Mul, Sub}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use num::Zero; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Gas, U512, -}; - -/// A struct representing a number of `Motes`. -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct Motes(U512); - -impl Motes { - /// Constructs a new `Motes`. - pub fn new(value: U512) -> Motes { - Motes(value) - } - - /// Checked integer addition. Computes `self + rhs`, returning `None` if overflow occurred. - pub fn checked_add(&self, rhs: Self) -> Option { - self.0.checked_add(rhs.value()).map(Self::new) - } - - /// Checked integer subtraction. Computes `self - rhs`, returning `None` if underflow occurred. 
- pub fn checked_sub(&self, rhs: Self) -> Option { - self.0.checked_sub(rhs.value()).map(Self::new) - } - - /// Returns the inner `U512` value. - pub fn value(&self) -> U512 { - self.0 - } - - /// Converts the given `gas` to `Motes` by multiplying them by `conv_rate`. - /// - /// Returns `None` if an arithmetic overflow occurred. - pub fn from_gas(gas: Gas, conv_rate: u64) -> Option { - gas.value() - .checked_mul(U512::from(conv_rate)) - .map(Self::new) - } -} - -impl fmt::Display for Motes { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.0) - } -} - -impl Add for Motes { - type Output = Motes; - - fn add(self, rhs: Self) -> Self::Output { - let val = self.value() + rhs.value(); - Motes::new(val) - } -} - -impl Sub for Motes { - type Output = Motes; - - fn sub(self, rhs: Self) -> Self::Output { - let val = self.value() - rhs.value(); - Motes::new(val) - } -} - -impl Div for Motes { - type Output = Motes; - - fn div(self, rhs: Self) -> Self::Output { - let val = self.value() / rhs.value(); - Motes::new(val) - } -} - -impl Mul for Motes { - type Output = Motes; - - fn mul(self, rhs: Self) -> Self::Output { - let val = self.value() * rhs.value(); - Motes::new(val) - } -} - -impl Zero for Motes { - fn zero() -> Self { - Motes::new(U512::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl Sum for Motes { - fn sum>(iter: I) -> Self { - iter.fold(Motes::zero(), Add::add) - } -} - -impl ToBytes for Motes { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Motes { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, remainder) = FromBytes::from_bytes(bytes)?; - Ok((Motes::new(value), remainder)) - } -} - -#[cfg(test)] -mod tests { - use crate::U512; - - use crate::{Gas, Motes}; - - #[test] - fn should_be_able_to_get_instance_of_motes() { - let initial_value = 1; - let motes = Motes::new(U512::from(initial_value)); - assert_eq!( - initial_value, - motes.value().as_u64(), - "should have equal value" - ) - } - - #[test] - fn should_be_able_to_compare_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - assert_eq!(left_motes, right_motes, "should be equal"); - let right_motes = Motes::new(U512::from(2)); - assert_ne!(left_motes, right_motes, "should not be equal") - } - - #[test] - fn should_be_able_to_add_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - let expected_motes = Motes::new(U512::from(2)); - assert_eq!( - (left_motes + right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_subtract_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1)); - let right_motes = Motes::new(U512::from(1)); - let expected_motes = Motes::new(U512::from(0)); - assert_eq!( - (left_motes - right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_multiply_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(100)); - let right_motes = Motes::new(U512::from(10)); - let expected_motes = Motes::new(U512::from(1000)); - assert_eq!( - (left_motes * right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_divide_two_instances_of_motes() { - let left_motes = Motes::new(U512::from(1000)); - let right_motes = 
Motes::new(U512::from(100)); - let expected_motes = Motes::new(U512::from(10)); - assert_eq!( - (left_motes / right_motes), - expected_motes, - "should be equal" - ) - } - - #[test] - fn should_be_able_to_convert_from_motes() { - let gas = Gas::new(U512::from(100)); - let motes = Motes::from_gas(gas, 10).expect("should have value"); - let expected_motes = Motes::new(U512::from(1000)); - assert_eq!(motes, expected_motes, "should be equal") - } - - #[test] - fn should_be_able_to_default() { - let motes = Motes::default(); - let expected_motes = Motes::new(U512::from(0)); - assert_eq!(motes, expected_motes, "should be equal") - } - - #[test] - fn should_be_able_to_compare_relative_value() { - let left_motes = Motes::new(U512::from(100)); - let right_motes = Motes::new(U512::from(10)); - assert!(left_motes > right_motes, "should be gt"); - let right_motes = Motes::new(U512::from(100)); - assert!(left_motes >= right_motes, "should be gte"); - assert!(left_motes <= right_motes, "should be lte"); - let left_motes = Motes::new(U512::from(10)); - assert!(left_motes < right_motes, "should be lt"); - } - - #[test] - fn should_default() { - let left_motes = Motes::new(U512::from(0)); - let right_motes = Motes::default(); - assert_eq!(left_motes, right_motes, "should be equal"); - let u512 = U512::zero(); - assert_eq!(left_motes.value(), u512, "should be equal"); - } - - #[test] - fn should_support_checked_mul_from_gas() { - let gas = Gas::new(U512::MAX); - let conv_rate = 10; - let maybe = Motes::from_gas(gas, conv_rate); - assert!(maybe.is_none(), "should be none due to overflow"); - } -} diff --git a/casper_types_ver_2_0/src/package.rs b/casper_types_ver_2_0/src/package.rs deleted file mode 100644 index 72ac1ce4..00000000 --- a/casper_types_ver_2_0/src/package.rs +++ /dev/null @@ -1,1567 +0,0 @@ -//! Module containing the Package and associated types for addressable entities. - -use alloc::{ - collections::{BTreeMap, BTreeSet}, - format, - string::String, - vec::Vec, -}; -use core::{ - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -use crate::{ - account::AccountHash, - addressable_entity::{AssociatedKeys, Error, FromStrError, Weight}, - bytesrepr::{self, FromBytes, ToBytes, U32_SERIALIZED_LENGTH, U8_SERIALIZED_LENGTH}, - checksummed_hex, - crypto::{self, PublicKey}, - system::SystemEntityType, - uref::URef, - AddressableEntityHash, CLType, CLTyped, HashAddr, Key, Tagged, BLAKE2B_DIGEST_LENGTH, - KEY_HASH_LENGTH, -}; - -/// Maximum number of distinct user groups. -pub const MAX_GROUPS: u8 = 10; -/// Maximum number of URefs which can be assigned across all user groups. -pub const MAX_TOTAL_UREFS: usize = 100; - -/// The tag for Contract Packages associated with Wasm stored on chain. -pub const PACKAGE_KIND_WASM_TAG: u8 = 0; -/// The tag for Contract Package associated with a native contract implementation. -pub const PACKAGE_KIND_SYSTEM_CONTRACT_TAG: u8 = 1; -/// The tag for Contract Package associated with an Account hash. -pub const PACKAGE_KIND_ACCOUNT_TAG: u8 = 2; -/// The tag for Contract Packages associated with legacy packages. 
-pub const PACKAGE_KIND_LEGACY_TAG: u8 = 3; - -const PACKAGE_STRING_PREFIX: &str = "contract-package-"; -// We need to support the legacy prefix of "contract-package-wasm". -const PACKAGE_STRING_LEGACY_EXTRA_PREFIX: &str = "wasm"; - -/// Associated error type of `TryFrom<&[u8]>` for `ContractHash`. -#[derive(Debug)] -pub struct TryFromSliceForPackageHashError(()); - -impl Display for TryFromSliceForPackageHashError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "failed to retrieve from slice") - } -} - -/// A (labelled) "user group". Each method of a versioned contract may be -/// associated with one or more user groups which are allowed to call it. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Group(String); - -impl Group { - /// Basic constructor - pub fn new>(s: T) -> Self { - Group(s.into()) - } - - /// Retrieves underlying name. - pub fn value(&self) -> &str { - &self.0 - } -} - -impl From for String { - fn from(group: Group) -> Self { - group.0 - } -} - -impl ToBytes for Group { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.value().write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Group { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - String::from_bytes(bytes).map(|(label, bytes)| (Group(label), bytes)) - } -} - -/// Automatically incremented value for a contract version within a major `ProtocolVersion`. -pub type EntityVersion = u32; - -/// Within each discrete major `ProtocolVersion`, entity version resets to this value. -pub const ENTITY_INITIAL_VERSION: EntityVersion = 1; - -/// Major element of `ProtocolVersion` a `EntityVersion` is compatible with. -pub type ProtocolVersionMajor = u32; - -/// Major element of `ProtocolVersion` combined with `EntityVersion`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct EntityVersionKey { - /// Major element of `ProtocolVersion` a `ContractVersion` is compatible with. - protocol_version_major: ProtocolVersionMajor, - /// Automatically incremented value for a contract version within a major `ProtocolVersion`. - entity_version: EntityVersion, -} - -impl EntityVersionKey { - /// Returns a new instance of ContractVersionKey with provided values. - pub fn new( - protocol_version_major: ProtocolVersionMajor, - entity_version: EntityVersion, - ) -> Self { - Self { - protocol_version_major, - entity_version, - } - } - - /// Returns the major element of the protocol version this contract is compatible with. - pub fn protocol_version_major(self) -> ProtocolVersionMajor { - self.protocol_version_major - } - - /// Returns the contract version within the protocol major version. 
- pub fn entity_version(self) -> EntityVersion { - self.entity_version - } -} - -impl From for (ProtocolVersionMajor, EntityVersion) { - fn from(entity_version_key: EntityVersionKey) -> Self { - ( - entity_version_key.protocol_version_major, - entity_version_key.entity_version, - ) - } -} - -impl ToBytes for EntityVersionKey { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - ENTITY_VERSION_KEY_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.protocol_version_major.write_bytes(writer)?; - self.entity_version.write_bytes(writer) - } -} - -impl FromBytes for EntityVersionKey { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (protocol_version_major, remainder) = ProtocolVersionMajor::from_bytes(bytes)?; - let (entity_version, remainder) = EntityVersion::from_bytes(remainder)?; - Ok(( - EntityVersionKey { - protocol_version_major, - entity_version, - }, - remainder, - )) - } -} - -impl Display for EntityVersionKey { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}.{}", self.protocol_version_major, self.entity_version) - } -} - -/// Serialized length of `EntityVersionKey`. -pub const ENTITY_VERSION_KEY_SERIALIZED_LENGTH: usize = - U32_SERIALIZED_LENGTH + U32_SERIALIZED_LENGTH; - -/// Collection of entity versions. -#[derive(Clone, PartialEq, Eq, Default, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(transparent, deny_unknown_fields)] -pub struct EntityVersions( - #[serde( - with = "BTreeMapToArray::" - )] - BTreeMap, -); - -impl EntityVersions { - /// Constructs a new, empty `EntityVersions`. - pub const fn new() -> Self { - EntityVersions(BTreeMap::new()) - } - - /// Returns an iterator over the `AddressableEntityHash`s (i.e. the map's values). 
- pub fn contract_hashes(&self) -> impl Iterator { - self.0.values() - } - - /// Returns the `AddressableEntityHash` under the key - pub fn get(&self, key: &EntityVersionKey) -> Option<&AddressableEntityHash> { - self.0.get(key) - } - - /// Retrieve the first entity version key if it exists - pub fn maybe_first(&mut self) -> Option<(EntityVersionKey, AddressableEntityHash)> { - if let Some((entity_version_key, entity_hash)) = self.0.iter().next() { - Some((*entity_version_key, *entity_hash)) - } else { - None - } - } -} - -impl ToBytes for EntityVersions { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for EntityVersions { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (versions, remainder) = - BTreeMap::::from_bytes(bytes)?; - Ok((EntityVersions(versions), remainder)) - } -} - -impl From> for EntityVersions { - fn from(value: BTreeMap) -> Self { - EntityVersions(value) - } -} - -struct EntityVersionLabels; - -impl KeyValueLabels for EntityVersionLabels { - const KEY: &'static str = "entity_version_key"; - const VALUE: &'static str = "addressable_entity_hash"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for EntityVersionLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("EntityVersionAndHash"); -} -/// Collection of named groups. -#[derive(Clone, PartialEq, Eq, Default, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(transparent, deny_unknown_fields)] -pub struct Groups( - #[serde(with = "BTreeMapToArray::, GroupLabels>")] - BTreeMap>, -); - -impl Groups { - /// Constructs a new, empty `Groups`. - pub const fn new() -> Self { - Groups(BTreeMap::new()) - } - - /// Inserts a named group. - /// - /// If the map did not have this name present, `None` is returned. If the map did have this - /// name present, its collection of `URef`s is overwritten, and the collection is returned. - pub fn insert(&mut self, name: Group, urefs: BTreeSet) -> Option> { - self.0.insert(name, urefs) - } - - /// Returns `true` if the named group exists in the collection. - pub fn contains(&self, name: &Group) -> bool { - self.0.contains_key(name) - } - - /// Returns a reference to the collection of `URef`s under the given `name` if any. - pub fn get(&self, name: &Group) -> Option<&BTreeSet> { - self.0.get(name) - } - - /// Returns a mutable reference to the collection of `URef`s under the given `name` if any. - pub fn get_mut(&mut self, name: &Group) -> Option<&mut BTreeSet> { - self.0.get_mut(name) - } - - /// Returns the number of named groups. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if there are no named groups. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Returns an iterator over the `Key`s (i.e. the map's values). - pub fn keys(&self) -> impl Iterator> { - self.0.values() - } - - /// Returns the total number of `URef`s contained in all the groups. 
- pub fn total_urefs(&self) -> usize { - self.0.values().map(|urefs| urefs.len()).sum() - } -} - -impl ToBytes for Groups { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } -} - -impl FromBytes for Groups { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (groups, remainder) = BTreeMap::>::from_bytes(bytes)?; - Ok((Groups(groups), remainder)) - } -} - -struct GroupLabels; - -impl KeyValueLabels for GroupLabels { - const KEY: &'static str = "group_name"; - const VALUE: &'static str = "group_users"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for GroupLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("NamedUserGroup"); -} - -#[cfg(any(feature = "testing", feature = "gens", test))] -impl From>> for Groups { - fn from(value: BTreeMap>) -> Self { - Groups(value) - } -} - -/// A newtype wrapping a `HashAddr` which references a [`Package`] in the global state. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "The hex-encoded address of the Package.") -)] -pub struct PackageHash( - #[cfg_attr(feature = "json-schema", schemars(skip, with = "String"))] HashAddr, -); - -impl PackageHash { - /// Constructs a new `PackageHash` from the raw bytes of the package hash. - pub const fn new(value: HashAddr) -> PackageHash { - PackageHash(value) - } - - /// Returns the raw bytes of the entity hash as an array. - pub fn value(&self) -> HashAddr { - self.0 - } - - /// Returns the raw bytes of the entity hash as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `PackageHash` for users getting and putting. - pub fn to_formatted_string(self) -> String { - format!("{}{}", PACKAGE_STRING_PREFIX, base16::encode_lower(&self.0),) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a - /// `PackageHash`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(PACKAGE_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - - let hex_addr = remainder - .strip_prefix(PACKAGE_STRING_LEGACY_EXTRA_PREFIX) - .unwrap_or(remainder); - - let bytes = HashAddr::try_from(checksummed_hex::decode(hex_addr)?.as_ref())?; - Ok(PackageHash(bytes)) - } - - /// Parses a `PublicKey` and outputs the corresponding account hash. - pub fn from_public_key( - public_key: &PublicKey, - blake2b_hash_fn: impl Fn(Vec) -> [u8; BLAKE2B_DIGEST_LENGTH], - ) -> Self { - const SYSTEM_LOWERCASE: &str = "system"; - const ED25519_LOWERCASE: &str = "ed25519"; - const SECP256K1_LOWERCASE: &str = "secp256k1"; - - let algorithm_name = match public_key { - PublicKey::System => SYSTEM_LOWERCASE, - PublicKey::Ed25519(_) => ED25519_LOWERCASE, - PublicKey::Secp256k1(_) => SECP256K1_LOWERCASE, - }; - let public_key_bytes: Vec = public_key.into(); - - // Prepare preimage based on the public key parameters. - let preimage = { - let mut data = Vec::with_capacity(algorithm_name.len() + public_key_bytes.len() + 1); - data.extend(algorithm_name.as_bytes()); - data.push(0); - data.extend(public_key_bytes); - data - }; - // Hash the preimage data using blake2b256 and return it. 
- let digest = blake2b_hash_fn(preimage); - Self::new(digest) - } -} - -impl Display for PackageHash { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for PackageHash { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "PackageHash({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for PackageHash { - fn cl_type() -> CLType { - CLType::ByteArray(KEY_HASH_LENGTH as u32) - } -} - -impl ToBytes for PackageHash { - #[inline(always)] - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.extend_from_slice(&self.0); - Ok(()) - } -} - -impl FromBytes for PackageHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, rem) = FromBytes::from_bytes(bytes)?; - Ok((PackageHash::new(bytes), rem)) - } -} - -impl From<[u8; 32]> for PackageHash { - fn from(bytes: [u8; 32]) -> Self { - PackageHash(bytes) - } -} - -impl Serialize for PackageHash { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for PackageHash { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - PackageHash::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = HashAddr::deserialize(deserializer)?; - Ok(PackageHash(bytes)) - } - } -} - -impl AsRef<[u8]> for PackageHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl TryFrom<&[u8]> for PackageHash { - type Error = TryFromSliceForPackageHashError; - - fn try_from(bytes: &[u8]) -> Result { - HashAddr::try_from(bytes) - .map(PackageHash::new) - .map_err(|_| TryFromSliceForPackageHashError(())) - } -} - -impl TryFrom<&Vec> for PackageHash { - type Error = TryFromSliceForPackageHashError; - - fn try_from(bytes: &Vec) -> Result { - HashAddr::try_from(bytes as &[u8]) - .map(PackageHash::new) - .map_err(|_| TryFromSliceForPackageHashError(())) - } -} - -impl From<&PublicKey> for PackageHash { - fn from(public_key: &PublicKey) -> Self { - PackageHash::from_public_key(public_key, crypto::blake2b) - } -} - -/// A enum to determine the lock status of the package. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum PackageStatus { - /// The package is locked and cannot be versioned. - Locked, - /// The package is unlocked and can be versioned. 
- Unlocked, -} - -impl PackageStatus { - /// Create a new status flag based on a boolean value - pub fn new(is_locked: bool) -> Self { - if is_locked { - PackageStatus::Locked - } else { - PackageStatus::Unlocked - } - } -} - -impl Default for PackageStatus { - fn default() -> Self { - Self::Unlocked - } -} - -impl ToBytes for PackageStatus { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - match self { - PackageStatus::Unlocked => result.append(&mut false.to_bytes()?), - PackageStatus::Locked => result.append(&mut true.to_bytes()?), - } - Ok(result) - } - - fn serialized_length(&self) -> usize { - match self { - PackageStatus::Unlocked => false.serialized_length(), - PackageStatus::Locked => true.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PackageStatus::Locked => writer.push(u8::from(true)), - PackageStatus::Unlocked => writer.push(u8::from(false)), - } - Ok(()) - } -} - -impl FromBytes for PackageStatus { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (val, bytes) = bool::from_bytes(bytes)?; - let status = PackageStatus::new(val); - Ok((status, bytes)) - } -} - -#[allow(missing_docs)] -#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[repr(u8)] -pub enum PackageKindTag { - System = 0, - Account = 1, - SmartContract = 2, -} - -impl ToBytes for PackageKindTag { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - (*self as u8).to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - (*self as u8).write_bytes(writer) - } -} - -impl FromBytes for PackageKindTag { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (package_kind_tag, remainder) = u8::from_bytes(bytes)?; - match package_kind_tag { - package_kind_tag if package_kind_tag == PackageKindTag::System as u8 => { - Ok((PackageKindTag::System, remainder)) - } - package_kind_tag if package_kind_tag == PackageKindTag::Account as u8 => { - Ok((PackageKindTag::Account, remainder)) - } - package_kind_tag if package_kind_tag == PackageKindTag::SmartContract as u8 => { - Ok((PackageKindTag::SmartContract, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Display for PackageKindTag { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - PackageKindTag::System => { - write!(f, "system") - } - PackageKindTag::Account => { - write!(f, "account") - } - PackageKindTag::SmartContract => { - write!(f, "smart-contract") - } - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> PackageKindTag { - match rng.gen_range(0..=1) { - 0 => PackageKindTag::System, - 1 => PackageKindTag::Account, - 2 => PackageKindTag::SmartContract, - _ => unreachable!(), - } - } -} - -#[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -/// The type of Package. -pub enum PackageKind { - /// Package associated with a native contract implementation. - System(SystemEntityType), - /// Package associated with an Account hash. - Account(AccountHash), - /// Packages associated with Wasm stored on chain. 
- #[default] - SmartContract, -} - -impl PackageKind { - /// Returns the Account hash associated with a Package based on the package kind. - pub fn maybe_account_hash(&self) -> Option { - match self { - Self::Account(account_hash) => Some(*account_hash), - Self::SmartContract | Self::System(_) => None, - } - } - - /// Returns the associated key set based on the Account hash set in the package kind. - pub fn associated_keys(&self) -> AssociatedKeys { - match self { - Self::Account(account_hash) => AssociatedKeys::new(*account_hash, Weight::new(1)), - Self::SmartContract | Self::System(_) => AssociatedKeys::default(), - } - } - - /// Returns if the current package is either a system contract or the system entity. - pub fn is_system(&self) -> bool { - matches!(self, Self::System(_)) - } - - /// Returns if the current package is the system mint. - pub fn is_system_mint(&self) -> bool { - matches!(self, Self::System(SystemEntityType::Mint)) - } - - /// Returns if the current package is the system auction. - pub fn is_system_auction(&self) -> bool { - matches!(self, Self::System(SystemEntityType::Auction)) - } - - /// Returns if the current package is associated with the system addressable entity. - pub fn is_system_account(&self) -> bool { - match self { - Self::Account(account_hash) => { - if *account_hash == PublicKey::System.to_account_hash() { - return true; - } - false - } - _ => false, - } - } -} - -impl Tagged for PackageKind { - fn tag(&self) -> PackageKindTag { - match self { - PackageKind::System(_) => PackageKindTag::System, - PackageKind::Account(_) => PackageKindTag::Account, - PackageKind::SmartContract => PackageKindTag::SmartContract, - } - } -} - -impl Tagged for PackageKind { - fn tag(&self) -> u8 { - let package_kind_tag: PackageKindTag = self.tag(); - package_kind_tag as u8 - } -} - -impl ToBytes for PackageKind { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - PackageKind::SmartContract => 0, - PackageKind::System(system_entity_type) => system_entity_type.serialized_length(), - PackageKind::Account(account_hash) => account_hash.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PackageKind::SmartContract => { - writer.push(self.tag()); - Ok(()) - } - PackageKind::System(system_entity_type) => { - writer.push(self.tag()); - system_entity_type.write_bytes(writer) - } - PackageKind::Account(account_hash) => { - writer.push(self.tag()); - account_hash.write_bytes(writer) - } - } - } -} - -impl FromBytes for PackageKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == PackageKindTag::System as u8 => { - let (entity_type, remainder) = SystemEntityType::from_bytes(remainder)?; - Ok((PackageKind::System(entity_type), remainder)) - } - tag if tag == PackageKindTag::Account as u8 => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((PackageKind::Account(account_hash), remainder)) - } - tag if tag == PackageKindTag::SmartContract as u8 => { - Ok((PackageKind::SmartContract, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Display for PackageKind { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - PackageKind::System(system_entity) => { - 
write!(f, "PackageKind::System({})", system_entity) - } - PackageKind::Account(account_hash) => { - write!(f, "PackageKind::Account({})", account_hash) - } - PackageKind::SmartContract => { - write!(f, "PackageKind::SmartContract") - } - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> PackageKind { - match rng.gen_range(0..=2) { - 0 => PackageKind::System(rng.gen()), - 1 => PackageKind::Account(rng.gen()), - 2 => PackageKind::SmartContract, - _ => unreachable!(), - } - } -} - -/// Entity definition, metadata, and security container. -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct Package { - /// Key used to add or disable versions. - access_key: URef, - /// All versions (enabled & disabled). - versions: EntityVersions, - /// Collection of disabled entity versions. The runtime will not permit disabled entity - /// versions to be executed. - disabled_versions: BTreeSet, - /// Mapping maintaining the set of URefs associated with each "user group". This can be used to - /// control access to methods in a particular version of the entity. A method is callable by - /// any context which "knows" any of the URefs associated with the method's user group. - groups: Groups, - /// A flag that determines whether a entity is locked - lock_status: PackageStatus, - /// The kind of package. - package_kind: PackageKind, -} - -impl CLTyped for Package { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl Package { - /// Create new `Package` (with no versions) from given access key. - pub fn new( - access_key: URef, - versions: EntityVersions, - disabled_versions: BTreeSet, - groups: Groups, - lock_status: PackageStatus, - package_kind: PackageKind, - ) -> Self { - Package { - access_key, - versions, - disabled_versions, - groups, - lock_status, - package_kind, - } - } - - /// Enable the entity version corresponding to the given hash (if it exists). - pub fn enable_version(&mut self, entity_hash: AddressableEntityHash) -> Result<(), Error> { - let entity_version_key = self - .find_entity_version_key_by_hash(&entity_hash) - .copied() - .ok_or(Error::EntityNotFound)?; - - self.disabled_versions.remove(&entity_version_key); - - Ok(()) - } - - /// Get the access key for this entity. - pub fn access_key(&self) -> URef { - self.access_key - } - - /// Get the mutable group definitions for this entity. - pub fn groups_mut(&mut self) -> &mut Groups { - &mut self.groups - } - - /// Get the group definitions for this entity. - pub fn groups(&self) -> &Groups { - &self.groups - } - - /// Adds new group to this entity. - pub fn add_group(&mut self, group: Group, urefs: BTreeSet) { - let v = self.groups.0.entry(group).or_default(); - v.extend(urefs) - } - - /// Lookup the entity hash for a given entity version (if present) - pub fn lookup_entity_hash( - &self, - entity_version_key: EntityVersionKey, - ) -> Option<&AddressableEntityHash> { - if !self.is_version_enabled(entity_version_key) { - return None; - } - self.versions.0.get(&entity_version_key) - } - - /// Checks if the given entity version exists and is available for use. - pub fn is_version_enabled(&self, entity_version_key: EntityVersionKey) -> bool { - !self.disabled_versions.contains(&entity_version_key) - && self.versions.0.contains_key(&entity_version_key) - } - - /// Returns `true` if the given entity hash exists and is enabled. 
- pub fn is_entity_enabled(&self, entity_hash: &AddressableEntityHash) -> bool { - match self.find_entity_version_key_by_hash(entity_hash) { - Some(version_key) => !self.disabled_versions.contains(version_key), - None => false, - } - } - - /// Insert a new entity version; the next sequential version number will be issued. - pub fn insert_entity_version( - &mut self, - protocol_version_major: ProtocolVersionMajor, - entity_hash: AddressableEntityHash, - ) -> EntityVersionKey { - let contract_version = self.next_entity_version_for(protocol_version_major); - let key = EntityVersionKey::new(protocol_version_major, contract_version); - self.versions.0.insert(key, entity_hash); - key - } - - /// Disable the entity version corresponding to the given hash (if it exists). - pub fn disable_entity_version( - &mut self, - entity_hash: AddressableEntityHash, - ) -> Result<(), Error> { - let entity_version_key = self - .versions - .0 - .iter() - .filter_map(|(k, v)| if *v == entity_hash { Some(*k) } else { None }) - .next() - .ok_or(Error::EntityNotFound)?; - - if !self.disabled_versions.contains(&entity_version_key) { - self.disabled_versions.insert(entity_version_key); - } - - Ok(()) - } - - fn find_entity_version_key_by_hash( - &self, - entity_hash: &AddressableEntityHash, - ) -> Option<&EntityVersionKey> { - self.versions - .0 - .iter() - .filter_map(|(k, v)| if v == entity_hash { Some(k) } else { None }) - .next() - } - - /// Returns reference to all of this entity's versions. - pub fn versions(&self) -> &EntityVersions { - &self.versions - } - - /// Returns all of this entity's enabled entity versions. - pub fn enabled_versions(&self) -> EntityVersions { - let mut ret = EntityVersions::new(); - for version in &self.versions.0 { - if !self.is_version_enabled(*version.0) { - continue; - } - ret.0.insert(*version.0, *version.1); - } - ret - } - - /// Returns mutable reference to all of this entity's versions (enabled and disabled). - pub fn versions_mut(&mut self) -> &mut EntityVersions { - &mut self.versions - } - - /// Consumes the object and returns all of this entity's versions (enabled and disabled). - pub fn take_versions(self) -> EntityVersions { - self.versions - } - - /// Returns all of this entity's disabled versions. - pub fn disabled_versions(&self) -> &BTreeSet { - &self.disabled_versions - } - - /// Returns mut reference to all of this entity's disabled versions. - pub fn disabled_versions_mut(&mut self) -> &mut BTreeSet { - &mut self.disabled_versions - } - - /// Removes a group from this entity (if it exists). - pub fn remove_group(&mut self, group: &Group) -> bool { - self.groups.0.remove(group).is_some() - } - - /// Gets the next available entity version for the given protocol version - fn next_entity_version_for(&self, protocol_version: ProtocolVersionMajor) -> EntityVersion { - let current_version = self - .versions - .0 - .keys() - .rev() - .find_map(|&entity_version_key| { - if entity_version_key.protocol_version_major() == protocol_version { - Some(entity_version_key.entity_version()) - } else { - None - } - }) - .unwrap_or(0); - - current_version + 1 - } - - /// Return the entity version key for the newest enabled entity version. - pub fn current_entity_version(&self) -> Option { - self.enabled_versions().0.keys().next_back().copied() - } - - /// Return the entity hash for the newest enabled entity version. - pub fn current_entity_hash(&self) -> Option { - self.enabled_versions().0.values().next_back().copied() - } - - /// Return the Key representation for the previous entity. 
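// The version bookkeeping above boils down to two collections: a `BTreeMap` keyed by
// (protocol major, entity version) holding every version ever inserted, and a `BTreeSet`
// of keys that have been disabled. A standalone sketch of that bookkeeping follows;
// `Registry` and `Hash32` are illustrative types, not casper-types items.

use std::collections::{BTreeMap, BTreeSet};

type Hash32 = [u8; 32];

#[derive(Default)]
struct Registry {
    versions: BTreeMap<(u32, u32), Hash32>, // (protocol major, entity version) -> hash
    disabled: BTreeSet<(u32, u32)>,
}

impl Registry {
    /// Next sequential version within a protocol major, mirroring `next_entity_version_for`.
    fn next_version_for(&self, major: u32) -> u32 {
        self.versions
            .keys()
            .rev()
            .find(|(m, _)| *m == major)
            .map(|(_, v)| v + 1)
            .unwrap_or(1)
    }

    /// Insert under the next sequential number, mirroring `insert_entity_version`.
    fn insert(&mut self, major: u32, hash: Hash32) -> (u32, u32) {
        let key = (major, self.next_version_for(major));
        self.versions.insert(key, hash);
        key
    }

    /// Disabling only records the key; the entry itself stays in `versions`.
    fn disable(&mut self, key: (u32, u32)) {
        self.disabled.insert(key);
    }

    /// Enabled versions are everything in `versions` not present in `disabled`.
    fn enabled(&self) -> BTreeMap<(u32, u32), Hash32> {
        self.versions
            .iter()
            .filter(|(k, _)| !self.disabled.contains(*k))
            .map(|(k, v)| (*k, *v))
            .collect()
    }
}

fn main() {
    let mut registry = Registry::default();
    let v1 = registry.insert(1, [42; 32]);
    let v2 = registry.insert(1, [84; 32]);
    assert_eq!((v1, v2), ((1, 1), (1, 2)));
    registry.disable(v2);
    assert_eq!(registry.enabled().len(), 1);
}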
- pub fn previous_entity_key(&self) -> Option { - if let Some(previous_entity_hash) = self.current_entity_hash() { - return Some(Key::addressable_entity_key( - self.get_package_kind().tag(), - previous_entity_hash, - )); - } - None - } - - /// Return the lock status of the entity package. - pub fn is_locked(&self) -> bool { - if self.versions.0.is_empty() { - return false; - } - - match self.lock_status { - PackageStatus::Unlocked => false, - PackageStatus::Locked => true, - } - } - - // TODO: Check the history of this. - /// Return the package status itself - pub fn get_lock_status(&self) -> PackageStatus { - self.lock_status.clone() - } - - /// Returns the kind of Package. - pub fn get_package_kind(&self) -> PackageKind { - self.package_kind - } - - /// Is the given Package associated to an Account. - pub fn is_account_kind(&self) -> bool { - matches!(self.package_kind, PackageKind::Account(_)) - } - - /// Update the entity package kind. - pub fn update_package_kind(&mut self, new_package_kind: PackageKind) { - self.package_kind = new_package_kind - } -} - -impl ToBytes for Package { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.access_key.serialized_length() - + self.versions.serialized_length() - + self.disabled_versions.serialized_length() - + self.groups.serialized_length() - + self.lock_status.serialized_length() - + self.package_kind.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.access_key().write_bytes(writer)?; - self.versions().write_bytes(writer)?; - self.disabled_versions().write_bytes(writer)?; - self.groups().write_bytes(writer)?; - self.lock_status.write_bytes(writer)?; - self.package_kind.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Package { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (access_key, bytes) = URef::from_bytes(bytes)?; - let (versions, bytes) = EntityVersions::from_bytes(bytes)?; - let (disabled_versions, bytes) = BTreeSet::::from_bytes(bytes)?; - let (groups, bytes) = Groups::from_bytes(bytes)?; - let (lock_status, bytes) = PackageStatus::from_bytes(bytes)?; - let (package_kind, bytes) = PackageKind::from_bytes(bytes)?; - let result = Package { - access_key, - versions, - disabled_versions, - groups, - lock_status, - package_kind, - }; - - Ok((result, bytes)) - } -} - -#[cfg(test)] -mod tests { - use core::iter::FromIterator; - - use super::*; - use crate::{ - AccessRights, EntityVersionKey, EntryPoint, EntryPointAccess, EntryPointType, Parameter, - ProtocolVersion, URef, - }; - use alloc::borrow::ToOwned; - - const ENTITY_HASH_V1: AddressableEntityHash = AddressableEntityHash::new([42; 32]); - const ENTITY_HASH_V2: AddressableEntityHash = AddressableEntityHash::new([84; 32]); - - fn make_package_with_two_versions() -> Package { - let mut package = Package::new( - URef::new([0; 32], AccessRights::NONE), - EntityVersions::default(), - BTreeSet::new(), - Groups::default(), - PackageStatus::default(), - PackageKind::SmartContract, - ); - - // add groups - { - let group_urefs = { - let mut ret = BTreeSet::new(); - ret.insert(URef::new([1; 32], AccessRights::READ)); - ret - }; - - package - .groups_mut() - .insert(Group::new("Group 1"), group_urefs.clone()); - - package - .groups_mut() - .insert(Group::new("Group 2"), group_urefs); - } - - // add entry_points - let _entry_points = { - let mut 
ret = BTreeMap::new(); - let entrypoint = EntryPoint::new( - "method0".to_string(), - vec![], - CLType::U32, - EntryPointAccess::groups(&["Group 2"]), - EntryPointType::Session, - ); - ret.insert(entrypoint.name().to_owned(), entrypoint); - let entrypoint = EntryPoint::new( - "method1".to_string(), - vec![Parameter::new("Foo", CLType::U32)], - CLType::U32, - EntryPointAccess::groups(&["Group 1"]), - EntryPointType::Session, - ); - ret.insert(entrypoint.name().to_owned(), entrypoint); - ret - }; - - let protocol_version = ProtocolVersion::V1_0_0; - - let v1 = package.insert_entity_version(protocol_version.value().major, ENTITY_HASH_V1); - let v2 = package.insert_entity_version(protocol_version.value().major, ENTITY_HASH_V2); - assert!(v2 > v1); - - package - } - - #[test] - fn next_entity_version() { - let major = 1; - let mut package = Package::new( - URef::new([0; 32], AccessRights::NONE), - EntityVersions::default(), - BTreeSet::default(), - Groups::default(), - PackageStatus::default(), - PackageKind::SmartContract, - ); - assert_eq!(package.next_entity_version_for(major), 1); - - let next_version = package.insert_entity_version(major, [123; 32].into()); - assert_eq!(next_version, EntityVersionKey::new(major, 1)); - assert_eq!(package.next_entity_version_for(major), 2); - let next_version_2 = package.insert_entity_version(major, [124; 32].into()); - assert_eq!(next_version_2, EntityVersionKey::new(major, 2)); - - let major = 2; - assert_eq!(package.next_entity_version_for(major), 1); - let next_version_3 = package.insert_entity_version(major, [42; 32].into()); - assert_eq!(next_version_3, EntityVersionKey::new(major, 1)); - } - - #[test] - fn roundtrip_serialization() { - let package = make_package_with_two_versions(); - let bytes = package.to_bytes().expect("should serialize"); - let (decoded_package, rem) = Package::from_bytes(&bytes).expect("should deserialize"); - assert_eq!(package, decoded_package); - assert_eq!(rem.len(), 0); - } - - #[test] - fn should_remove_group() { - let mut package = make_package_with_two_versions(); - - assert!(!package.remove_group(&Group::new("Non-existent group"))); - assert!(package.remove_group(&Group::new("Group 1"))); - assert!(!package.remove_group(&Group::new("Group 1"))); // Group no longer exists - } - - #[test] - fn should_disable_and_enable_entity_version() { - const ENTITY_HASH: AddressableEntityHash = AddressableEntityHash::new([123; 32]); - - let mut package = make_package_with_two_versions(); - - assert!( - !package.is_entity_enabled(&ENTITY_HASH), - "nonexisting entity should return false" - ); - - assert_eq!( - package.current_entity_version(), - Some(EntityVersionKey::new(1, 2)) - ); - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH_V2)); - - assert_eq!( - package.versions(), - &EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2) - ])), - ); - assert_eq!( - package.enabled_versions(), - EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2) - ])), - ); - - assert!(!package.is_entity_enabled(&ENTITY_HASH)); - - assert_eq!( - package.disable_entity_version(ENTITY_HASH), - Err(Error::EntityNotFound), - "should return entity not found error" - ); - - assert!( - !package.is_entity_enabled(&ENTITY_HASH), - "disabling missing entity shouldnt change outcome" - ); - - let next_version = package.insert_entity_version(1, ENTITY_HASH); - assert!( - 
package.is_version_enabled(next_version), - "version should exist and be enabled" - ); - assert!(package.is_entity_enabled(&ENTITY_HASH)); - - assert!( - package.is_entity_enabled(&ENTITY_HASH), - "entity should be enabled" - ); - - assert_eq!( - package.disable_entity_version(ENTITY_HASH), - Ok(()), - "should be able to disable version" - ); - assert!(!package.is_entity_enabled(&ENTITY_HASH)); - - assert!( - !package.is_entity_enabled(&ENTITY_HASH), - "entity should be disabled" - ); - assert_eq!( - package.lookup_entity_hash(next_version), - None, - "should not return disabled entity version" - ); - assert!( - !package.is_version_enabled(next_version), - "version should not be enabled" - ); - - assert_eq!( - package.current_entity_version(), - Some(EntityVersionKey::new(1, 2)) - ); - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH_V2)); - assert_eq!( - package.versions(), - &EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2), - (next_version, ENTITY_HASH), - ])), - ); - assert_eq!( - package.enabled_versions(), - EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2), - ])), - ); - assert_eq!( - package.disabled_versions(), - &BTreeSet::from_iter([next_version]), - ); - - assert_eq!( - package.current_entity_version(), - Some(EntityVersionKey::new(1, 2)) - ); - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH_V2)); - - assert_eq!( - package.disable_entity_version(ENTITY_HASH_V2), - Ok(()), - "should be able to disable version 2" - ); - - assert_eq!( - package.enabled_versions(), - EntityVersions::from(BTreeMap::from_iter([( - EntityVersionKey::new(1, 1), - ENTITY_HASH_V1 - ),])), - ); - - assert_eq!( - package.current_entity_version(), - Some(EntityVersionKey::new(1, 1)) - ); - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH_V1)); - - assert_eq!( - package.disabled_versions(), - &BTreeSet::from_iter([next_version, EntityVersionKey::new(1, 2)]), - ); - - assert_eq!(package.enable_version(ENTITY_HASH_V2), Ok(()),); - - assert_eq!( - package.enabled_versions(), - EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2), - ])), - ); - - assert_eq!( - package.disabled_versions(), - &BTreeSet::from_iter([next_version]) - ); - - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH_V2)); - - assert_eq!(package.enable_version(ENTITY_HASH), Ok(()),); - - assert_eq!( - package.enable_version(ENTITY_HASH), - Ok(()), - "enabling a entity twice should be a noop" - ); - - assert_eq!( - package.enabled_versions(), - EntityVersions::from(BTreeMap::from_iter([ - (EntityVersionKey::new(1, 1), ENTITY_HASH_V1), - (EntityVersionKey::new(1, 2), ENTITY_HASH_V2), - (next_version, ENTITY_HASH), - ])), - ); - - assert_eq!(package.disabled_versions(), &BTreeSet::new(),); - - assert_eq!(package.current_entity_hash(), Some(ENTITY_HASH)); - } - - #[test] - fn should_not_allow_to_enable_non_existing_version() { - let mut package = make_package_with_two_versions(); - - assert_eq!( - package.enable_version(AddressableEntityHash::default()), - Err(Error::EntityNotFound), - ); - } - - #[test] - fn package_hash_from_slice() { - let bytes: Vec = (0..32).collect(); - let package_hash = HashAddr::try_from(&bytes[..]).expect("should create package hash"); - let package_hash = PackageHash::new(package_hash); - assert_eq!(&bytes, 
&package_hash.as_bytes()); - } - - #[test] - fn package_hash_from_str() { - let package_hash = PackageHash::new([3; 32]); - let encoded = package_hash.to_formatted_string(); - let decoded = PackageHash::from_formatted_str(&encoded).unwrap(); - assert_eq!(package_hash, decoded); - - let invalid_prefix = - "contract-package0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - PackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } - - #[test] - fn package_hash_from_legacy_str() { - let package_hash = PackageHash([3; 32]); - let hex_addr = package_hash.to_string(); - let legacy_encoded = format!("contract-package-wasm{}", hex_addr); - let decoded_from_legacy = PackageHash::from_formatted_str(&legacy_encoded) - .expect("should accept legacy prefixed string"); - assert_eq!( - package_hash, decoded_from_legacy, - "decoded_from_legacy should equal decoded" - ); - - let invalid_prefix = - "contract-packagewasm0000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(invalid_prefix).unwrap_err(), - FromStrError::InvalidPrefix - )); - - let short_addr = - "contract-package-wasm00000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(short_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let long_addr = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000000"; - assert!(matches!( - PackageHash::from_formatted_str(long_addr).unwrap_err(), - FromStrError::Hash(_) - )); - - let invalid_hex = - "contract-package-wasm000000000000000000000000000000000000000000000000000000000000000g"; - assert!(matches!( - PackageHash::from_formatted_str(invalid_hex).unwrap_err(), - FromStrError::Hex(_) - )); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_contract_package(contract_pkg in gens::package_arb()) { - bytesrepr::test_serialization_roundtrip(&contract_pkg); - } - } -} diff --git a/casper_types_ver_2_0/src/peers_map.rs b/casper_types_ver_2_0/src/peers_map.rs deleted file mode 100644 index c7a28334..00000000 --- a/casper_types_ver_2_0/src/peers_map.rs +++ /dev/null @@ -1,138 +0,0 @@ -use alloc::collections::BTreeMap; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; -use alloc::{ - string::{String, ToString}, - vec::Vec, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -use core::iter; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -/// Node peer entry. 
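// The `package_hash_from_str` and `package_hash_from_legacy_str` tests above pin down the
// string format: the formatted form is a "contract-package-" prefix followed by 64 lower-case
// hex characters, and `from_formatted_str` additionally accepts the legacy
// "contract-package-wasm" prefix on input. A short usage sketch, assuming the types shown
// above are re-exported from the crate root (the import path is an assumption):

use casper_types::PackageHash;

fn main() {
    let hash = PackageHash::new([3; 32]);

    // Round-trips through the current prefix.
    let formatted = hash.to_formatted_string();
    assert!(formatted.starts_with("contract-package-"));
    assert_eq!(PackageHash::from_formatted_str(&formatted).unwrap(), hash);

    // The legacy wasm prefix is still accepted on input.
    let legacy = formatted.replace("contract-package-", "contract-package-wasm");
    assert_eq!(PackageHash::from_formatted_str(&legacy).unwrap(), hash);
}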
-#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct PeerEntry { - /// Node id. - pub node_id: String, - /// Node address. - pub address: String, -} - -impl PeerEntry { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - Self { - node_id: rng.random_string(10..20), - address: rng.random_string(10..20), - } - } -} - -impl ToBytes for PeerEntry { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.node_id.write_bytes(writer)?; - self.address.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.node_id.serialized_length() + self.address.serialized_length() - } -} - -impl FromBytes for PeerEntry { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (node_id, remainder) = String::from_bytes(bytes)?; - let (address, remainder) = String::from_bytes(remainder)?; - Ok((PeerEntry { node_id, address }, remainder)) - } -} - -/// Map of peer IDs to network addresses. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Peers(Vec); - -impl Peers { - /// Retrieve collection of `PeerEntry` records. - pub fn into_inner(self) -> Vec { - self.0 - } - - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - let count = rng.gen_range(0..10); - let peers = iter::repeat(()) - .map(|_| PeerEntry::random(rng)) - .take(count) - .collect(); - Self(peers) - } -} - -impl From> for Peers { - fn from(input: BTreeMap) -> Self { - let ret = input - .into_iter() - .map(|(node_id, address)| PeerEntry { - node_id: node_id.to_string(), - address, - }) - .collect(); - Peers(ret) - } -} - -impl ToBytes for Peers { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Peers { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (inner, remainder) = Vec::::from_bytes(bytes)?; - Ok((Peers(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = Peers::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/phase.rs b/casper_types_ver_2_0/src/phase.rs deleted file mode 100644 index 35586889..00000000 --- a/casper_types_ver_2_0/src/phase.rs +++ /dev/null @@ -1,56 +0,0 @@ -// Can be removed once https://github.com/rust-lang/rustfmt/issues/3362 is resolved. -#[rustfmt::skip] -use alloc::vec; -use alloc::vec::Vec; - -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::{FromPrimitive, ToPrimitive}; - -use crate::{ - bytesrepr::{Error, FromBytes, ToBytes}, - CLType, CLTyped, -}; - -/// The number of bytes in a serialized [`Phase`]. -pub const PHASE_SERIALIZED_LENGTH: usize = 1; - -/// The phase in which a given contract is executing. 
-#[derive(Debug, PartialEq, Eq, Clone, Copy, FromPrimitive, ToPrimitive)] -#[repr(u8)] -pub enum Phase { - /// Set while committing the genesis or upgrade configurations. - System = 0, - /// Set while executing the payment code of a deploy. - Payment = 1, - /// Set while executing the session code of a deploy. - Session = 2, - /// Set while finalizing payment at the end of a deploy. - FinalizePayment = 3, -} - -impl ToBytes for Phase { - fn to_bytes(&self) -> Result, Error> { - // NOTE: Assumed safe as [`Phase`] is represented as u8. - let id = self.to_u8().expect("Phase is represented as a u8"); - - Ok(vec![id]) - } - - fn serialized_length(&self) -> usize { - PHASE_SERIALIZED_LENGTH - } -} - -impl FromBytes for Phase { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (id, rest) = u8::from_bytes(bytes)?; - let phase = FromPrimitive::from_u8(id).ok_or(Error::Formatting)?; - Ok((phase, rest)) - } -} - -impl CLTyped for Phase { - fn cl_type() -> CLType { - CLType::U8 - } -} diff --git a/casper_types_ver_2_0/src/protocol_version.rs b/casper_types_ver_2_0/src/protocol_version.rs deleted file mode 100644 index fe889f1c..00000000 --- a/casper_types_ver_2_0/src/protocol_version.rs +++ /dev/null @@ -1,550 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{convert::TryFrom, fmt, str::FromStr}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - bytesrepr::{Error, FromBytes, ToBytes}, - ParseSemVerError, SemVer, -}; - -/// A newtype wrapping a [`SemVer`] which represents a Casper Platform protocol version. -#[derive(Copy, Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ProtocolVersion(SemVer); - -/// The result of [`ProtocolVersion::check_next_version`]. -#[derive(Debug, PartialEq, Eq)] -pub enum VersionCheckResult { - /// Upgrade possible. - Valid { - /// Is this a major protocol version upgrade? - is_major_version: bool, - }, - /// Upgrade is invalid. - Invalid, -} - -impl VersionCheckResult { - /// Checks if given version result is invalid. - /// - /// Invalid means that a given version can not be followed. - pub fn is_invalid(&self) -> bool { - matches!(self, VersionCheckResult::Invalid) - } - - /// Checks if given version is a major protocol version upgrade. - pub fn is_major_version(&self) -> bool { - match self { - VersionCheckResult::Valid { is_major_version } => *is_major_version, - VersionCheckResult::Invalid => false, - } - } -} - -impl ProtocolVersion { - /// Version 1.0.0. - pub const V1_0_0: ProtocolVersion = ProtocolVersion(SemVer { - major: 1, - minor: 0, - patch: 0, - }); - - /// Constructs a new `ProtocolVersion` from `version`. - pub const fn new(version: SemVer) -> ProtocolVersion { - ProtocolVersion(version) - } - - /// Constructs a new `ProtocolVersion` from the given semver parts. - pub const fn from_parts(major: u32, minor: u32, patch: u32) -> ProtocolVersion { - let sem_ver = SemVer::new(major, minor, patch); - Self::new(sem_ver) - } - - /// Returns the inner [`SemVer`]. - pub fn value(&self) -> SemVer { - self.0 - } - - /// Checks if next version can be followed. - pub fn check_next_version(&self, next: &ProtocolVersion) -> VersionCheckResult { - // Protocol major versions should increase monotonically by 1. 
- let major_bumped = self.0.major.saturating_add(1); - if next.0.major < self.0.major || next.0.major > major_bumped { - return VersionCheckResult::Invalid; - } - - if next.0.major == major_bumped { - return VersionCheckResult::Valid { - is_major_version: true, - }; - } - - // Covers the equal major versions - debug_assert_eq!(next.0.major, self.0.major); - - if next.0.minor < self.0.minor { - // Protocol minor versions within the same major version should not go backwards. - return VersionCheckResult::Invalid; - } - - if next.0.minor > self.0.minor { - return VersionCheckResult::Valid { - is_major_version: false, - }; - } - - // Code belows covers equal minor versions - debug_assert_eq!(next.0.minor, self.0.minor); - - // Protocol patch versions should increase monotonically but can be skipped. - if next.0.patch <= self.0.patch { - return VersionCheckResult::Invalid; - } - - VersionCheckResult::Valid { - is_major_version: false, - } - } - - /// Checks if given protocol version is compatible with current one. - /// - /// Two protocol versions with different major version are considered to be incompatible. - pub fn is_compatible_with(&self, version: &ProtocolVersion) -> bool { - self.0.major == version.0.major - } -} - -impl ToBytes for ProtocolVersion { - fn to_bytes(&self) -> Result, Error> { - self.value().to_bytes() - } - - fn serialized_length(&self) -> usize { - self.value().serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - writer.extend(self.0.major.to_le_bytes()); - writer.extend(self.0.minor.to_le_bytes()); - writer.extend(self.0.patch.to_le_bytes()); - Ok(()) - } -} - -impl FromBytes for ProtocolVersion { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (version, rem) = SemVer::from_bytes(bytes)?; - let protocol_version = ProtocolVersion::new(version); - Ok((protocol_version, rem)) - } -} - -impl FromStr for ProtocolVersion { - type Err = ParseSemVerError; - - fn from_str(s: &str) -> Result { - let version = SemVer::try_from(s)?; - Ok(ProtocolVersion::new(version)) - } -} - -impl Serialize for ProtocolVersion { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - let str = format!("{}.{}.{}", self.0.major, self.0.minor, self.0.patch); - String::serialize(&str, serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for ProtocolVersion { - fn deserialize>(deserializer: D) -> Result { - let semver = if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - SemVer::try_from(value_as_string.as_str()).map_err(SerdeError::custom)? - } else { - SemVer::deserialize(deserializer)? 
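// Taken together, `check_next_version` above accepts exactly three shapes of upgrade: a major
// bump by one (minor and patch may reset or skip), a minor bump within the same major (patch
// may reset or skip), or a patch bump within the same major/minor (values may be skipped but
// never repeated or rolled back). A short usage sketch, assuming the `ProtocolVersion` API
// shown above is available from the crate root (the import path is an assumption):

use casper_types::ProtocolVersion;

fn main() {
    let current = ProtocolVersion::from_parts(1, 2, 3);

    // Major bump by exactly one is valid and flagged as a major upgrade.
    assert!(current
        .check_next_version(&ProtocolVersion::from_parts(2, 0, 0))
        .is_major_version());

    // Minor or patch bumps within the same major are valid but not "major".
    assert!(!current
        .check_next_version(&ProtocolVersion::from_parts(1, 3, 0))
        .is_invalid());

    // Re-issuing the same version, or skipping a major version, is rejected.
    assert!(current.check_next_version(&current).is_invalid());
    assert!(current
        .check_next_version(&ProtocolVersion::from_parts(3, 0, 0))
        .is_invalid());
}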
- }; - Ok(ProtocolVersion(semver)) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for ProtocolVersion { - fn schema_name() -> String { - String::from("ProtocolVersion") - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Casper Platform protocol version".to_string()); - schema_object.into() - } -} - -impl fmt::Display for ProtocolVersion { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::SemVer; - - #[test] - fn should_follow_version_with_optional_code() { - let value = VersionCheckResult::Valid { - is_major_version: false, - }; - assert!(!value.is_invalid()); - assert!(!value.is_major_version()); - } - - #[test] - fn should_follow_version_with_required_code() { - let value = VersionCheckResult::Valid { - is_major_version: true, - }; - assert!(!value.is_invalid()); - assert!(value.is_major_version()); - } - - #[test] - fn should_not_follow_version_with_invalid_code() { - let value = VersionCheckResult::Invalid; - assert!(value.is_invalid()); - assert!(!value.is_major_version()); - } - - #[test] - fn should_be_able_to_get_instance() { - let initial_value = SemVer::new(1, 0, 0); - let item = ProtocolVersion::new(initial_value); - assert_eq!(initial_value, item.value(), "should have equal value") - } - - #[test] - fn should_be_able_to_compare_two_instances() { - let lhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let rhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert_eq!(lhs, rhs, "should be equal"); - let rhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert_ne!(lhs, rhs, "should not be equal") - } - - #[test] - fn should_be_able_to_default() { - let defaulted = ProtocolVersion::default(); - let expected = ProtocolVersion::new(SemVer::new(0, 0, 0)); - assert_eq!(defaulted, expected, "should be equal") - } - - #[test] - fn should_be_able_to_compare_relative_value() { - let lhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - let rhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert!(lhs > rhs, "should be gt"); - let rhs = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!(lhs >= rhs, "should be gte"); - assert!(lhs <= rhs, "should be lte"); - let lhs = ProtocolVersion::new(SemVer::new(1, 0, 0)); - assert!(lhs < rhs, "should be lt"); - } - - #[test] - fn should_follow_major_version_upgrade() { - // If the upgrade protocol version is lower than or the same as EE's current in-use protocol - // version the upgrade is rejected and an error is returned; this includes the special case - // of a defaulted protocol version ( 0.0.0 ). - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!( - prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - } - - #[test] - fn should_reject_if_major_version_decreases() { - let prev = ProtocolVersion::new(SemVer::new(10, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(9, 0, 0)); - // Major version must not decrease ... - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_check_follows_minor_version_upgrade() { - // [major version] may remain the same in the case of a minor or patch version increase. 
- - // Minor version must not decrease within the same major version - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 2, 0)); - - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_not_care_if_minor_bump_resets_patch() { - let prev = ProtocolVersion::new(SemVer::new(1, 2, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 3, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - - let prev = ProtocolVersion::new(SemVer::new(1, 20, 42)); - let next = ProtocolVersion::new(SemVer::new(1, 30, 43)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_not_care_if_major_bump_resets_minor_or_patch() { - // A major version increase resets both the minor and patch versions to ( 0.0 ). - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 1, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - let next = ProtocolVersion::new(SemVer::new(2, 0, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - let next = ProtocolVersion::new(SemVer::new(2, 1, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - } - - #[test] - fn should_reject_patch_version_rollback() { - // Patch version must not decrease or remain the same within the same major and minor - // version pair, but may skip. 
- let prev = ProtocolVersion::new(SemVer::new(1, 0, 42)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 41)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - let next = ProtocolVersion::new(SemVer::new(1, 0, 13)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_accept_patch_version_update_with_optional_code() { - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 1)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - - let prev = ProtocolVersion::new(SemVer::new(1, 0, 8)); - let next = ProtocolVersion::new(SemVer::new(1, 0, 42)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_accept_minor_version_update_with_optional_code() { - // installer is optional for minor bump - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(1, 1, 0)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - - let prev = ProtocolVersion::new(SemVer::new(3, 98, 0)); - let next = ProtocolVersion::new(SemVer::new(3, 99, 0)); - let value = prev.check_next_version(&next); - assert!(!value.is_invalid(), "should be valid"); - assert!(!value.is_major_version(), "should not be a major version"); - } - - #[test] - fn should_allow_skip_minor_version_within_major_version() { - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - - let next = ProtocolVersion::new(SemVer::new(1, 3, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - - let next = ProtocolVersion::new(SemVer::new(1, 7, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_allow_skip_patch_version_within_minor_version() { - let prev = ProtocolVersion::new(SemVer::new(1, 1, 0)); - - let next = ProtocolVersion::new(SemVer::new(1, 1, 2)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: false - } - ); - } - - #[test] - fn should_allow_skipped_minor_and_patch_on_major_bump() { - // skip minor - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 1, 0)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - // skip patch - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 1)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - - // skip many minors and patches - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 3, 10)); - assert_eq!( - prev.check_next_version(&next), - VersionCheckResult::Valid { - is_major_version: true - } - ); - } - - #[test] - fn should_allow_code_on_major_update() { - // major upgrade requires installer to be present - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(2, 0, 0)); - assert!( - 
prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - - let prev = ProtocolVersion::new(SemVer::new(2, 99, 99)); - let next = ProtocolVersion::new(SemVer::new(3, 0, 0)); - assert!( - prev.check_next_version(&next).is_major_version(), - "should be major version" - ); - } - - #[test] - fn should_not_skip_major_version() { - // can bump only by 1 - let prev = ProtocolVersion::new(SemVer::new(1, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(3, 0, 0)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_reject_major_version_rollback() { - // can bump forward - let prev = ProtocolVersion::new(SemVer::new(2, 0, 0)); - let next = ProtocolVersion::new(SemVer::new(0, 0, 0)); - assert_eq!(prev.check_next_version(&next), VersionCheckResult::Invalid); - } - - #[test] - fn should_check_same_version_is_invalid() { - for ver in &[ - ProtocolVersion::from_parts(1, 0, 0), - ProtocolVersion::from_parts(1, 2, 0), - ProtocolVersion::from_parts(1, 2, 3), - ] { - assert_eq!(ver.check_next_version(ver), VersionCheckResult::Invalid); - } - } - - #[test] - fn should_not_be_compatible_with_different_major_version() { - let current = ProtocolVersion::from_parts(1, 2, 3); - let other = ProtocolVersion::from_parts(2, 5, 6); - assert!(!current.is_compatible_with(&other)); - - let current = ProtocolVersion::from_parts(1, 0, 0); - let other = ProtocolVersion::from_parts(2, 0, 0); - assert!(!current.is_compatible_with(&other)); - } - - #[test] - fn should_be_compatible_with_equal_major_version_backwards() { - let current = ProtocolVersion::from_parts(1, 99, 99); - let other = ProtocolVersion::from_parts(1, 0, 0); - assert!(current.is_compatible_with(&other)); - } - - #[test] - fn should_be_compatible_with_equal_major_version_forwards() { - let current = ProtocolVersion::from_parts(1, 0, 0); - let other = ProtocolVersion::from_parts(1, 99, 99); - assert!(current.is_compatible_with(&other)); - } - - #[test] - fn should_serialize_to_json_properly() { - let protocol_version = ProtocolVersion::from_parts(1, 1, 1); - let json = serde_json::to_string(&protocol_version).unwrap(); - let expected = "\"1.1.1\""; - assert_eq!(json, expected); - } - - #[test] - fn serialize_roundtrip() { - let protocol_version = ProtocolVersion::from_parts(1, 1, 1); - let serialized_json = serde_json::to_string(&protocol_version).unwrap(); - assert_eq!( - protocol_version, - serde_json::from_str(&serialized_json).unwrap() - ); - - let serialized_bincode = bincode::serialize(&protocol_version).unwrap(); - assert_eq!( - protocol_version, - bincode::deserialize(&serialized_bincode).unwrap() - ); - } -} diff --git a/casper_types_ver_2_0/src/reactor_state.rs b/casper_types_ver_2_0/src/reactor_state.rs deleted file mode 100644 index 19de98d8..00000000 --- a/casper_types_ver_2_0/src/reactor_state.rs +++ /dev/null @@ -1,109 +0,0 @@ -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -use alloc::vec::Vec; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use derive_more::Display; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -use rand::Rng; - -#[cfg(test)] -use crate::testing::TestRng; - -/// The state of the reactor. 
-#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Display)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum ReactorState { - /// Get all components and reactor state set up on start. - Initialize, - /// Orient to the network and attempt to catch up to tip. - CatchUp, - /// Running commit upgrade and creating immediate switch block. - Upgrading, - /// Stay caught up with tip. - KeepUp, - /// Node is currently caught up and is an active validator. - Validate, - /// Node should be shut down for upgrade. - ShutdownForUpgrade, -} - -impl ReactorState { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..6) { - 0 => Self::Initialize, - 1 => Self::CatchUp, - 2 => Self::Upgrading, - 3 => Self::KeepUp, - 4 => Self::Validate, - 5 => Self::ShutdownForUpgrade, - _ => panic!(), - } - } -} - -const INITIALIZE_TAG: u8 = 0; -const CATCHUP_TAG: u8 = 1; -const UPGRADING_TAG: u8 = 2; -const KEEPUP_TAG: u8 = 3; -const VALIDATE_TAG: u8 = 4; -const SHUTDOWN_FOR_UPGRADE_TAG: u8 = 5; - -impl ToBytes for ReactorState { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ReactorState::Initialize => INITIALIZE_TAG, - ReactorState::CatchUp => CATCHUP_TAG, - ReactorState::Upgrading => UPGRADING_TAG, - ReactorState::KeepUp => KEEPUP_TAG, - ReactorState::Validate => VALIDATE_TAG, - ReactorState::ShutdownForUpgrade => SHUTDOWN_FOR_UPGRADE_TAG, - } - .write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for ReactorState { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - let reactor_state = match tag { - INITIALIZE_TAG => ReactorState::Initialize, - CATCHUP_TAG => ReactorState::CatchUp, - UPGRADING_TAG => ReactorState::Upgrading, - KEEPUP_TAG => ReactorState::KeepUp, - VALIDATE_TAG => ReactorState::Validate, - SHUTDOWN_FOR_UPGRADE_TAG => ReactorState::ShutdownForUpgrade, - _ => return Err(bytesrepr::Error::NotRepresentable), - }; - Ok((reactor_state, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = ReactorState::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/semver.rs b/casper_types_ver_2_0/src/semver.rs deleted file mode 100644 index 5feafe53..00000000 --- a/casper_types_ver_2_0/src/semver.rs +++ /dev/null @@ -1,152 +0,0 @@ -use alloc::vec::Vec; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, - num::ParseIntError, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U32_SERIALIZED_LENGTH}; - -/// Length of SemVer when serialized -pub const SEM_VER_SERIALIZED_LENGTH: usize = 3 * U32_SERIALIZED_LENGTH; - -/// A struct for semantic versioning. -#[derive( - Copy, Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct SemVer { - /// Major version. - pub major: u32, - /// Minor version. - pub minor: u32, - /// Patch version. 
- pub patch: u32, -} - -impl SemVer { - /// Version 1.0.0. - pub const V1_0_0: SemVer = SemVer { - major: 1, - minor: 0, - patch: 0, - }; - - /// Constructs a new `SemVer` from the given semver parts. - pub const fn new(major: u32, minor: u32, patch: u32) -> SemVer { - SemVer { - major, - minor, - patch, - } - } -} - -impl ToBytes for SemVer { - fn to_bytes(&self) -> Result, Error> { - let mut ret = bytesrepr::unchecked_allocate_buffer(self); - ret.append(&mut self.major.to_bytes()?); - ret.append(&mut self.minor.to_bytes()?); - ret.append(&mut self.patch.to_bytes()?); - Ok(ret) - } - - fn serialized_length(&self) -> usize { - SEM_VER_SERIALIZED_LENGTH - } -} - -impl FromBytes for SemVer { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (major, rem): (u32, &[u8]) = FromBytes::from_bytes(bytes)?; - let (minor, rem): (u32, &[u8]) = FromBytes::from_bytes(rem)?; - let (patch, rem): (u32, &[u8]) = FromBytes::from_bytes(rem)?; - Ok((SemVer::new(major, minor, patch), rem)) - } -} - -impl Display for SemVer { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}.{}.{}", self.major, self.minor, self.patch) - } -} - -/// Parsing error when creating a SemVer. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ParseSemVerError { - /// Invalid version format. - InvalidVersionFormat, - /// Error parsing an integer. - ParseIntError(ParseIntError), -} - -impl Display for ParseSemVerError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - ParseSemVerError::InvalidVersionFormat => formatter.write_str("invalid version format"), - ParseSemVerError::ParseIntError(error) => error.fmt(formatter), - } - } -} - -impl From for ParseSemVerError { - fn from(error: ParseIntError) -> ParseSemVerError { - ParseSemVerError::ParseIntError(error) - } -} - -impl TryFrom<&str> for SemVer { - type Error = ParseSemVerError; - fn try_from(value: &str) -> Result { - let tokens: Vec<&str> = value.split('.').collect(); - if tokens.len() != 3 { - return Err(ParseSemVerError::InvalidVersionFormat); - } - - Ok(SemVer { - major: tokens[0].parse()?, - minor: tokens[1].parse()?, - patch: tokens[2].parse()?, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use core::convert::TryInto; - - #[test] - fn should_compare_semver_versions() { - assert!(SemVer::new(0, 0, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 1, 0) < SemVer::new(1, 2, 0)); - assert!(SemVer::new(1, 0, 0) < SemVer::new(1, 2, 0)); - assert!(SemVer::new(1, 0, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 0) < SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) == SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) >= SemVer::new(1, 2, 3)); - assert!(SemVer::new(1, 2, 3) <= SemVer::new(1, 2, 3)); - assert!(SemVer::new(2, 0, 0) >= SemVer::new(1, 99, 99)); - assert!(SemVer::new(2, 0, 0) > SemVer::new(1, 99, 99)); - } - - #[test] - fn parse_from_string() { - let ver1: SemVer = "100.20.3".try_into().expect("should parse"); - assert_eq!(ver1, SemVer::new(100, 20, 3)); - let ver2: SemVer = "0.0.1".try_into().expect("should parse"); - assert_eq!(ver2, SemVer::new(0, 0, 1)); - - assert!(SemVer::try_from("1.a.2.3").is_err()); - assert!(SemVer::try_from("1. 
2.3").is_err()); - assert!(SemVer::try_from("12345124361461.0.1").is_err()); - assert!(SemVer::try_from("1.2.3.4").is_err()); - assert!(SemVer::try_from("1.2").is_err()); - assert!(SemVer::try_from("1").is_err()); - assert!(SemVer::try_from("0").is_err()); - } -} diff --git a/casper_types_ver_2_0/src/serde_helpers.rs b/casper_types_ver_2_0/src/serde_helpers.rs deleted file mode 100644 index b1e94baf..00000000 --- a/casper_types_ver_2_0/src/serde_helpers.rs +++ /dev/null @@ -1,109 +0,0 @@ -use alloc::string::String; -use core::convert::TryFrom; - -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::Digest; - -pub(crate) mod raw_32_byte_array { - use super::*; - - pub(crate) fn serialize( - array: &[u8; 32], - serializer: S, - ) -> Result { - if serializer.is_human_readable() { - base16::encode_lower(array).serialize(serializer) - } else { - array.serialize(serializer) - } - } - - pub(crate) fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result<[u8; 32], D::Error> { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let bytes = base16::decode(hex_string.as_bytes()).map_err(SerdeError::custom)?; - <[u8; 32]>::try_from(bytes.as_ref()).map_err(SerdeError::custom) - } else { - <[u8; 32]>::deserialize(deserializer) - } - } -} - -pub(crate) mod contract_hash_as_digest { - use super::*; - use crate::AddressableEntityHash; - - pub(crate) fn serialize( - contract_hash: &AddressableEntityHash, - serializer: S, - ) -> Result { - Digest::from(contract_hash.value()).serialize(serializer) - } - - pub(crate) fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result { - let digest = Digest::deserialize(deserializer)?; - Ok(AddressableEntityHash::new(digest.value())) - } -} - -pub(crate) mod contract_package_hash_as_digest { - use super::*; - use crate::PackageHash; - - pub(crate) fn serialize( - contract_package_hash: &PackageHash, - serializer: S, - ) -> Result { - Digest::from(contract_package_hash.value()).serialize(serializer) - } - - pub(crate) fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result { - let digest = Digest::deserialize(deserializer)?; - Ok(PackageHash::new(digest.value())) - } -} - -/// This module allows `DeployHash`es to be serialized and deserialized using the underlying -/// `[u8; 32]` rather than delegating to the wrapped `Digest`, which in turn delegates to a -/// `Vec` for legacy reasons. -/// -/// This is required as the `DeployHash` defined in `casper-types` up until v4.0.0 used the array -/// form, while the `DeployHash` defined in `casper-node` during this period delegated to `Digest`. -/// -/// We use this module in places where the old `casper_types_ver_2_0::DeployHash` was held as a member of a -/// type which implements `Serialize` and/or `Deserialize`. 
-pub(crate) mod deploy_hash_as_array { - use super::*; - use crate::DeployHash; - - pub(crate) fn serialize( - deploy_hash: &DeployHash, - serializer: S, - ) -> Result { - if serializer.is_human_readable() { - base16::encode_lower(&deploy_hash.inner().value()).serialize(serializer) - } else { - deploy_hash.inner().value().serialize(serializer) - } - } - - pub(crate) fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result { - let bytes = if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let vec_bytes = base16::decode(hex_string.as_bytes()).map_err(SerdeError::custom)?; - <[u8; DeployHash::LENGTH]>::try_from(vec_bytes.as_ref()).map_err(SerdeError::custom)? - } else { - <[u8; DeployHash::LENGTH]>::deserialize(deserializer)? - }; - Ok(DeployHash::new(Digest::from(bytes))) - } -} diff --git a/casper_types_ver_2_0/src/stored_value.rs b/casper_types_ver_2_0/src/stored_value.rs deleted file mode 100644 index 7725fb32..00000000 --- a/casper_types_ver_2_0/src/stored_value.rs +++ /dev/null @@ -1,899 +0,0 @@ -mod global_state_identifier; -mod type_mismatch; - -use alloc::{ - boxed::Box, - string::{String, ToString}, - vec::Vec, -}; -use core::{convert::TryFrom, fmt::Debug}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; -use serde_bytes::ByteBuf; - -use crate::{ - account::Account, - bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - contract_messages::{MessageChecksum, MessageTopicSummary}, - contract_wasm::ContractWasm, - contracts::{Contract, ContractPackage}, - package::Package, - system::auction::{Bid, BidKind, EraInfo, UnbondingPurse, WithdrawPurse}, - AddressableEntity, ByteCode, CLValue, DeployInfo, Transfer, -}; -pub use global_state_identifier::GlobalStateIdentifier; -pub use type_mismatch::TypeMismatch; - -#[allow(clippy::large_enum_variant)] -#[repr(u8)] -enum Tag { - CLValue = 0, - Account = 1, - ContractWasm = 2, - Contract = 3, - ContractPackage = 4, - Transfer = 5, - DeployInfo = 6, - EraInfo = 7, - Bid = 8, - Withdraw = 9, - Unbonding = 10, - AddressableEntity = 11, - BidKind = 12, - Package = 13, - ByteCode = 14, - MessageTopic = 15, - Message = 16, -} - -/// A value stored in Global State. -#[allow(clippy::large_enum_variant)] -#[derive(Eq, PartialEq, Clone, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(with = "serde_helpers::BinarySerHelper") -)] -pub enum StoredValue { - /// A CLValue. - CLValue(CLValue), - /// An account. - Account(Account), - /// Contract wasm. - ContractWasm(ContractWasm), - /// A contract. - Contract(Contract), - /// A contract package. - ContractPackage(ContractPackage), - /// A `Transfer`. - Transfer(Transfer), - /// Info about a deploy. - DeployInfo(DeployInfo), - /// Info about an era. - EraInfo(EraInfo), - /// Variant that stores [`Bid`]. - Bid(Box), - /// Variant that stores withdraw information. - Withdraw(Vec), - /// Unbonding information. - Unbonding(Vec), - /// An `AddressableEntity`. - AddressableEntity(AddressableEntity), - /// Variant that stores [`BidKind`]. - BidKind(BidKind), - /// A `Package`. - Package(Package), - /// A record of byte code. - ByteCode(ByteCode), - /// Variant that stores a message topic. - MessageTopic(MessageTopicSummary), - /// Variant that stores a message digest. 
- Message(MessageChecksum), -} - -impl StoredValue { - /// Returns a reference to the wrapped `CLValue` if this is a `CLValue` variant. - pub fn as_cl_value(&self) -> Option<&CLValue> { - match self { - StoredValue::CLValue(cl_value) => Some(cl_value), - _ => None, - } - } - - /// Returns a reference to the wrapped `Account` if this is an `Account` variant. - pub fn as_account(&self) -> Option<&Account> { - match self { - StoredValue::Account(account) => Some(account), - _ => None, - } - } - - /// Returns a reference to the wrapped `ByteCode` if this is a `ByteCode` variant. - pub fn as_byte_code(&self) -> Option<&ByteCode> { - match self { - StoredValue::ByteCode(byte_code) => Some(byte_code), - _ => None, - } - } - - /// Returns a reference to the wrapped `Contract` if this is a `Contract` variant. - pub fn as_contract(&self) -> Option<&Contract> { - match self { - StoredValue::Contract(contract) => Some(contract), - _ => None, - } - } - - /// Returns a reference to the wrapped `Package` if this is a `Package` variant. - pub fn as_package(&self) -> Option<&Package> { - match self { - StoredValue::Package(package) => Some(package), - _ => None, - } - } - - /// Returns a reference to the wrapped `Transfer` if this is a `Transfer` variant. - pub fn as_transfer(&self) -> Option<&Transfer> { - match self { - StoredValue::Transfer(transfer) => Some(transfer), - _ => None, - } - } - - /// Returns a reference to the wrapped `DeployInfo` if this is a `DeployInfo` variant. - pub fn as_deploy_info(&self) -> Option<&DeployInfo> { - match self { - StoredValue::DeployInfo(deploy_info) => Some(deploy_info), - _ => None, - } - } - - /// Returns a reference to the wrapped `EraInfo` if this is an `EraInfo` variant. - pub fn as_era_info(&self) -> Option<&EraInfo> { - match self { - StoredValue::EraInfo(era_info) => Some(era_info), - _ => None, - } - } - - /// Returns a reference to the wrapped `Bid` if this is a `Bid` variant. - pub fn as_bid(&self) -> Option<&Bid> { - match self { - StoredValue::Bid(bid) => Some(bid), - _ => None, - } - } - - /// Returns a reference to the wrapped list of `WithdrawPurse`s if this is a `Withdraw` variant. - pub fn as_withdraw(&self) -> Option<&Vec> { - match self { - StoredValue::Withdraw(withdraw_purses) => Some(withdraw_purses), - _ => None, - } - } - - /// Returns a reference to the wrapped list of `UnbondingPurse`s if this is an `Unbonding` - /// variant. - pub fn as_unbonding(&self) -> Option<&Vec> { - match self { - StoredValue::Unbonding(unbonding_purses) => Some(unbonding_purses), - _ => None, - } - } - - /// Returns a reference to the wrapped `AddressableEntity` if this is an `AddressableEntity` - /// variant. - pub fn as_addressable_entity(&self) -> Option<&AddressableEntity> { - match self { - StoredValue::AddressableEntity(entity) => Some(entity), - _ => None, - } - } - - /// Returns a reference to the wrapped `MessageTopicSummary` if this is a `MessageTopic` - /// variant. - pub fn as_message_topic_summary(&self) -> Option<&MessageTopicSummary> { - match self { - StoredValue::MessageTopic(summary) => Some(summary), - _ => None, - } - } - - /// Returns a reference to the wrapped `MessageChecksum` if this is a `Message` - /// variant. - pub fn as_message_checksum(&self) -> Option<&MessageChecksum> { - match self { - StoredValue::Message(checksum) => Some(checksum), - _ => None, - } - } - - /// Returns a reference to the wrapped `BidKind` if this is a `BidKind` variant. 
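A sketch of how the borrowing `as_*` accessors above (the last of them, `as_bid_kind`, continues just below) are typically used on a value fetched from global state; the function and its caller are hypothetical, and the import path assumes the usual crate-root re-export.

use casper_types_ver_2_0::StoredValue;

// Report what kind of record a query returned without consuming it.
// Each `as_*` accessor returns `Option<&T>` for its matching variant;
// the `into_*` family further down gives the consuming equivalents.
fn describe(stored_value: &StoredValue) {
    if let Some(cl_value) = stored_value.as_cl_value() {
        println!("CLValue of type {:?}", cl_value.cl_type());
    } else if let Some(account) = stored_value.as_account() {
        println!("account {}", account.account_hash());
    } else {
        println!("other stored value: {}", stored_value.type_name());
    }
}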
- pub fn as_bid_kind(&self) -> Option<&BidKind> { - match self { - StoredValue::BidKind(bid_kind) => Some(bid_kind), - _ => None, - } - } - - /// Returns the `CLValue` if this is a `CLValue` variant. - pub fn into_cl_value(self) -> Option { - match self { - StoredValue::CLValue(cl_value) => Some(cl_value), - _ => None, - } - } - - /// Returns the `Account` if this is an `Account` variant. - pub fn into_account(self) -> Option { - match self { - StoredValue::Account(account) => Some(account), - _ => None, - } - } - - /// Returns the `ContractWasm` if this is a `ContractWasm` variant. - pub fn into_contract_wasm(self) -> Option { - match self { - StoredValue::ContractWasm(contract_wasm) => Some(contract_wasm), - _ => None, - } - } - - /// Returns the `Contract` if this is a `Contract` variant. - pub fn into_contract(self) -> Option { - match self { - StoredValue::Contract(contract) => Some(contract), - _ => None, - } - } - - /// Returns the `Package` if this is a `Package` variant. - pub fn into_contract_package(self) -> Option { - match self { - StoredValue::ContractPackage(contract_package) => Some(contract_package), - _ => None, - } - } - - /// Returns the `Transfer` if this is a `Transfer` variant. - pub fn into_transfer(self) -> Option { - match self { - StoredValue::Transfer(transfer) => Some(transfer), - _ => None, - } - } - - /// Returns the `DeployInfo` if this is a `DeployInfo` variant. - pub fn into_deploy_info(self) -> Option { - match self { - StoredValue::DeployInfo(deploy_info) => Some(deploy_info), - _ => None, - } - } - - /// Returns the `EraInfo` if this is an `EraInfo` variant. - pub fn into_era_info(self) -> Option { - match self { - StoredValue::EraInfo(era_info) => Some(era_info), - _ => None, - } - } - - /// Returns the `Bid` if this is a `Bid` variant. - pub fn into_bid(self) -> Option { - match self { - StoredValue::Bid(bid) => Some(*bid), - _ => None, - } - } - - /// Returns the list of `WithdrawPurse`s if this is a `Withdraw` variant. - pub fn into_withdraw(self) -> Option> { - match self { - StoredValue::Withdraw(withdraw_purses) => Some(withdraw_purses), - _ => None, - } - } - - /// Returns the list of `UnbondingPurse`s if this is an `Unbonding` variant. - pub fn into_unbonding(self) -> Option> { - match self { - StoredValue::Unbonding(unbonding_purses) => Some(unbonding_purses), - _ => None, - } - } - - /// Returns the `AddressableEntity` if this is an `AddressableEntity` variant. - pub fn into_addressable_entity(self) -> Option { - match self { - StoredValue::AddressableEntity(entity) => Some(entity), - _ => None, - } - } - - /// Returns the `BidKind` if this is a `BidKind` variant. - pub fn into_bid_kind(self) -> Option { - match self { - StoredValue::BidKind(bid_kind) => Some(bid_kind), - _ => None, - } - } - - /// Returns the type name of the [`StoredValue`] enum variant. 
- /// - /// For [`CLValue`] variants it will return the name of the [`CLType`](crate::cl_type::CLType) - pub fn type_name(&self) -> String { - match self { - StoredValue::CLValue(cl_value) => format!("{:?}", cl_value.cl_type()), - StoredValue::Account(_) => "Account".to_string(), - StoredValue::ContractWasm(_) => "ContractWasm".to_string(), - StoredValue::Contract(_) => "Contract".to_string(), - StoredValue::ContractPackage(_) => "ContractPackage".to_string(), - StoredValue::Transfer(_) => "Transfer".to_string(), - StoredValue::DeployInfo(_) => "DeployInfo".to_string(), - StoredValue::EraInfo(_) => "EraInfo".to_string(), - StoredValue::Bid(_) => "Bid".to_string(), - StoredValue::Withdraw(_) => "Withdraw".to_string(), - StoredValue::Unbonding(_) => "Unbonding".to_string(), - StoredValue::AddressableEntity(_) => "AddressableEntity".to_string(), - StoredValue::BidKind(_) => "BidKind".to_string(), - StoredValue::ByteCode(_) => "ByteCode".to_string(), - StoredValue::Package(_) => "Package".to_string(), - StoredValue::MessageTopic(_) => "MessageTopic".to_string(), - StoredValue::Message(_) => "Message".to_string(), - } - } - - fn tag(&self) -> Tag { - match self { - StoredValue::CLValue(_) => Tag::CLValue, - StoredValue::Account(_) => Tag::Account, - StoredValue::ContractWasm(_) => Tag::ContractWasm, - StoredValue::ContractPackage(_) => Tag::ContractPackage, - StoredValue::Contract(_) => Tag::Contract, - StoredValue::Transfer(_) => Tag::Transfer, - StoredValue::DeployInfo(_) => Tag::DeployInfo, - StoredValue::EraInfo(_) => Tag::EraInfo, - StoredValue::Bid(_) => Tag::Bid, - StoredValue::Withdraw(_) => Tag::Withdraw, - StoredValue::Unbonding(_) => Tag::Unbonding, - StoredValue::AddressableEntity(_) => Tag::AddressableEntity, - StoredValue::BidKind(_) => Tag::BidKind, - StoredValue::Package(_) => Tag::Package, - StoredValue::ByteCode(_) => Tag::ByteCode, - StoredValue::MessageTopic(_) => Tag::MessageTopic, - StoredValue::Message(_) => Tag::Message, - } - } -} - -impl From for StoredValue { - fn from(value: CLValue) -> StoredValue { - StoredValue::CLValue(value) - } -} -impl From for StoredValue { - fn from(value: Account) -> StoredValue { - StoredValue::Account(value) - } -} - -impl From for StoredValue { - fn from(value: ContractWasm) -> Self { - StoredValue::ContractWasm(value) - } -} - -impl From for StoredValue { - fn from(value: ContractPackage) -> Self { - StoredValue::ContractPackage(value) - } -} - -impl From for StoredValue { - fn from(value: Contract) -> Self { - StoredValue::Contract(value) - } -} - -impl From for StoredValue { - fn from(value: AddressableEntity) -> StoredValue { - StoredValue::AddressableEntity(value) - } -} -impl From for StoredValue { - fn from(value: Package) -> StoredValue { - StoredValue::Package(value) - } -} - -impl From for StoredValue { - fn from(bid: Bid) -> StoredValue { - StoredValue::Bid(Box::new(bid)) - } -} - -impl From for StoredValue { - fn from(bid_kind: BidKind) -> StoredValue { - StoredValue::BidKind(bid_kind) - } -} - -impl From for StoredValue { - fn from(value: ByteCode) -> StoredValue { - StoredValue::ByteCode(value) - } -} - -impl TryFrom for CLValue { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - let type_name = stored_value.type_name(); - match stored_value { - StoredValue::CLValue(cl_value) => Ok(cl_value), - StoredValue::Package(contract_package) => Ok(CLValue::from_t(contract_package) - .map_err(|_error| TypeMismatch::new("ContractPackage".to_string(), type_name))?), - _ => 
Err(TypeMismatch::new("CLValue".to_string(), type_name)), - } - } -} - -impl TryFrom for Account { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::Account(account) => Ok(account), - _ => Err(TypeMismatch::new( - "Account".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for ContractWasm { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::ContractWasm(contract_wasm) => Ok(contract_wasm), - _ => Err(TypeMismatch::new( - "ContractWasm".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for ByteCode { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::ByteCode(byte_code) => Ok(byte_code), - _ => Err(TypeMismatch::new( - "ByteCode".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for ContractPackage { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::ContractPackage(contract_package) => Ok(contract_package), - _ => Err(TypeMismatch::new( - "ContractPackage".to_string(), - value.type_name(), - )), - } - } -} - -impl TryFrom for Contract { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::Contract(contract) => Ok(contract), - _ => Err(TypeMismatch::new( - "Contract".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for Package { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::Package(contract_package) => Ok(contract_package), - _ => Err(TypeMismatch::new( - "ContractPackage".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for AddressableEntity { - type Error = TypeMismatch; - - fn try_from(stored_value: StoredValue) -> Result { - match stored_value { - StoredValue::AddressableEntity(contract) => Ok(contract), - _ => Err(TypeMismatch::new( - "AddressableEntity".to_string(), - stored_value.type_name(), - )), - } - } -} - -impl TryFrom for Transfer { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::Transfer(transfer) => Ok(transfer), - _ => Err(TypeMismatch::new("Transfer".to_string(), value.type_name())), - } - } -} - -impl TryFrom for DeployInfo { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::DeployInfo(deploy_info) => Ok(deploy_info), - _ => Err(TypeMismatch::new( - "DeployInfo".to_string(), - value.type_name(), - )), - } - } -} - -impl TryFrom for EraInfo { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::EraInfo(era_info) => Ok(era_info), - _ => Err(TypeMismatch::new("EraInfo".to_string(), value.type_name())), - } - } -} - -impl TryFrom for Bid { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::Bid(bid) => Ok(*bid), - _ => Err(TypeMismatch::new("Bid".to_string(), value.type_name())), - } - } -} - -impl TryFrom for BidKind { - type Error = TypeMismatch; - - fn try_from(value: StoredValue) -> Result { - match value { - StoredValue::BidKind(bid_kind) => Ok(bid_kind), - _ => Err(TypeMismatch::new("BidKind".to_string(), value.type_name())), - } - } -} - -impl ToBytes for StoredValue { - fn to_bytes(&self) -> Result, 
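A sketch of the typed-extraction pattern the `TryFrom` impls above enable, assuming the crate-root re-exports; the failure value is the `TypeMismatch` error defined in this same file (e.g. "expected CLValue, found Account").

use core::convert::TryFrom;

use casper_types_ver_2_0::{CLValue, StoredValue};

// Insist that a queried value is a CLValue; any other variant is reported
// through the TryFrom error rather than silently discarded.
fn expect_cl_value(
    stored_value: StoredValue,
) -> Result<CLValue, <CLValue as TryFrom<StoredValue>>::Error> {
    // `into_cl_value()` is the Option-returning alternative when the
    // mismatch detail is not needed.
    CLValue::try_from(stored_value)
}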
bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - StoredValue::CLValue(cl_value) => cl_value.serialized_length(), - StoredValue::Account(account) => account.serialized_length(), - StoredValue::ContractWasm(contract_wasm) => contract_wasm.serialized_length(), - StoredValue::Contract(contract_header) => contract_header.serialized_length(), - StoredValue::ContractPackage(contract_package) => { - contract_package.serialized_length() - } - StoredValue::Transfer(transfer) => transfer.serialized_length(), - StoredValue::DeployInfo(deploy_info) => deploy_info.serialized_length(), - StoredValue::EraInfo(era_info) => era_info.serialized_length(), - StoredValue::Bid(bid) => bid.serialized_length(), - StoredValue::Withdraw(withdraw_purses) => withdraw_purses.serialized_length(), - StoredValue::Unbonding(unbonding_purses) => unbonding_purses.serialized_length(), - StoredValue::AddressableEntity(entity) => entity.serialized_length(), - StoredValue::BidKind(bid_kind) => bid_kind.serialized_length(), - StoredValue::Package(package) => package.serialized_length(), - StoredValue::ByteCode(byte_code) => byte_code.serialized_length(), - StoredValue::MessageTopic(message_topic_summary) => { - message_topic_summary.serialized_length() - } - StoredValue::Message(message_digest) => message_digest.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.tag() as u8); - match self { - StoredValue::CLValue(cl_value) => cl_value.write_bytes(writer)?, - StoredValue::Account(account) => account.write_bytes(writer)?, - StoredValue::ContractWasm(contract_wasm) => contract_wasm.write_bytes(writer)?, - StoredValue::Contract(contract_header) => contract_header.write_bytes(writer)?, - StoredValue::ContractPackage(contract_package) => { - contract_package.write_bytes(writer)? - } - StoredValue::Transfer(transfer) => transfer.write_bytes(writer)?, - StoredValue::DeployInfo(deploy_info) => deploy_info.write_bytes(writer)?, - StoredValue::EraInfo(era_info) => era_info.write_bytes(writer)?, - StoredValue::Bid(bid) => bid.write_bytes(writer)?, - StoredValue::Withdraw(unbonding_purses) => unbonding_purses.write_bytes(writer)?, - StoredValue::Unbonding(unbonding_purses) => unbonding_purses.write_bytes(writer)?, - StoredValue::AddressableEntity(entity) => entity.write_bytes(writer)?, - StoredValue::BidKind(bid_kind) => bid_kind.write_bytes(writer)?, - StoredValue::Package(package) => package.write_bytes(writer)?, - StoredValue::ByteCode(byte_code) => byte_code.write_bytes(writer)?, - StoredValue::MessageTopic(message_topic_summary) => { - message_topic_summary.write_bytes(writer)? 
- } - StoredValue::Message(message_digest) => message_digest.write_bytes(writer)?, - }; - Ok(()) - } -} - -impl FromBytes for StoredValue { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - tag if tag == Tag::CLValue as u8 => CLValue::from_bytes(remainder) - .map(|(cl_value, remainder)| (StoredValue::CLValue(cl_value), remainder)), - tag if tag == Tag::Account as u8 => Account::from_bytes(remainder) - .map(|(account, remainder)| (StoredValue::Account(account), remainder)), - tag if tag == Tag::ContractWasm as u8 => { - ContractWasm::from_bytes(remainder).map(|(contract_wasm, remainder)| { - (StoredValue::ContractWasm(contract_wasm), remainder) - }) - } - tag if tag == Tag::ContractPackage as u8 => { - ContractPackage::from_bytes(remainder).map(|(contract_package, remainder)| { - (StoredValue::ContractPackage(contract_package), remainder) - }) - } - tag if tag == Tag::Contract as u8 => Contract::from_bytes(remainder) - .map(|(contract, remainder)| (StoredValue::Contract(contract), remainder)), - tag if tag == Tag::Transfer as u8 => Transfer::from_bytes(remainder) - .map(|(transfer, remainder)| (StoredValue::Transfer(transfer), remainder)), - tag if tag == Tag::DeployInfo as u8 => DeployInfo::from_bytes(remainder) - .map(|(deploy_info, remainder)| (StoredValue::DeployInfo(deploy_info), remainder)), - tag if tag == Tag::EraInfo as u8 => EraInfo::from_bytes(remainder) - .map(|(deploy_info, remainder)| (StoredValue::EraInfo(deploy_info), remainder)), - tag if tag == Tag::Bid as u8 => Bid::from_bytes(remainder) - .map(|(bid, remainder)| (StoredValue::Bid(Box::new(bid)), remainder)), - tag if tag == Tag::BidKind as u8 => BidKind::from_bytes(remainder) - .map(|(bid_kind, remainder)| (StoredValue::BidKind(bid_kind), remainder)), - tag if tag == Tag::Withdraw as u8 => { - Vec::::from_bytes(remainder).map(|(withdraw_purses, remainder)| { - (StoredValue::Withdraw(withdraw_purses), remainder) - }) - } - tag if tag == Tag::Unbonding as u8 => { - Vec::::from_bytes(remainder).map(|(unbonding_purses, remainder)| { - (StoredValue::Unbonding(unbonding_purses), remainder) - }) - } - tag if tag == Tag::AddressableEntity as u8 => AddressableEntity::from_bytes(remainder) - .map(|(entity, remainder)| (StoredValue::AddressableEntity(entity), remainder)), - tag if tag == Tag::Package as u8 => Package::from_bytes(remainder) - .map(|(package, remainder)| (StoredValue::Package(package), remainder)), - tag if tag == Tag::ByteCode as u8 => ByteCode::from_bytes(remainder) - .map(|(byte_code, remainder)| (StoredValue::ByteCode(byte_code), remainder)), - tag if tag == Tag::MessageTopic as u8 => MessageTopicSummary::from_bytes(remainder) - .map(|(message_summary, remainder)| { - (StoredValue::MessageTopic(message_summary), remainder) - }), - tag if tag == Tag::Message as u8 => MessageChecksum::from_bytes(remainder) - .map(|(checksum, remainder)| (StoredValue::Message(checksum), remainder)), - _ => Err(Error::Formatting), - } - } -} - -mod serde_helpers { - use super::*; - - #[derive(Serialize)] - pub(super) enum BinarySerHelper<'a> { - /// A CLValue. - CLValue(&'a CLValue), - /// An account. - Account(&'a Account), - ContractWasm(&'a ContractWasm), - /// A contract. - Contract(&'a Contract), - /// A `Package`. - ContractPackage(&'a ContractPackage), - /// A `Transfer`. - Transfer(&'a Transfer), - /// Info about a deploy. - DeployInfo(&'a DeployInfo), - /// Info about an era. - EraInfo(&'a EraInfo), - /// Variant that stores [`Bid`]. 
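The `ToBytes`/`FromBytes` impls above encode a `StoredValue` as a single tag byte followed by the payload of the wrapped type. A round-trip sketch, assuming the crate-root re-exports:

use casper_types_ver_2_0::{
    bytesrepr::{self, FromBytes, ToBytes},
    CLValue, StoredValue,
};

fn bytesrepr_roundtrip() -> Result<(), bytesrepr::Error> {
    let original = StoredValue::CLValue(CLValue::from_t(42u64).expect("encode u64"));
    // First byte is Tag::CLValue (0); the remainder is the CLValue payload.
    let bytes = original.to_bytes()?;
    let (decoded, remainder) = StoredValue::from_bytes(&bytes)?;
    assert!(remainder.is_empty());
    assert_eq!(original, decoded);
    Ok(())
}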
- Bid(&'a Bid), - /// Variant that stores withdraw information. - Withdraw(&'a Vec), - /// Unbonding information. - Unbonding(&'a Vec), - /// An `AddressableEntity`. - AddressableEntity(&'a AddressableEntity), - /// Variant that stores [`BidKind`]. - BidKind(&'a BidKind), - /// Package. - Package(&'a Package), - /// A record of byte code. - ByteCode(&'a ByteCode), - /// Variant that stores [`MessageTopicSummary`]. - MessageTopic(&'a MessageTopicSummary), - /// Variant that stores a [`MessageChecksum`]. - Message(&'a MessageChecksum), - } - - #[derive(Deserialize)] - pub(super) enum BinaryDeserHelper { - /// A CLValue. - CLValue(CLValue), - /// An account. - Account(Account), - /// A contract wasm. - ContractWasm(ContractWasm), - /// A contract. - Contract(Contract), - /// A `Package`. - ContractPackage(ContractPackage), - /// A `Transfer`. - Transfer(Transfer), - /// Info about a deploy. - DeployInfo(DeployInfo), - /// Info about an era. - EraInfo(EraInfo), - /// Variant that stores [`Bid`]. - Bid(Box), - /// Variant that stores withdraw information. - Withdraw(Vec), - /// Unbonding information. - Unbonding(Vec), - /// An `AddressableEntity`. - AddressableEntity(AddressableEntity), - /// Variant that stores [`BidKind`]. - BidKind(BidKind), - /// A record of a Package. - Package(Package), - /// A record of byte code. - ByteCode(ByteCode), - /// Variant that stores [`MessageTopicSummary`]. - MessageTopic(MessageTopicSummary), - /// Variant that stores [`MessageChecksum`]. - Message(MessageChecksum), - } - - impl<'a> From<&'a StoredValue> for BinarySerHelper<'a> { - fn from(stored_value: &'a StoredValue) -> Self { - match stored_value { - StoredValue::CLValue(payload) => BinarySerHelper::CLValue(payload), - StoredValue::Account(payload) => BinarySerHelper::Account(payload), - StoredValue::ContractWasm(payload) => BinarySerHelper::ContractWasm(payload), - StoredValue::Contract(payload) => BinarySerHelper::Contract(payload), - StoredValue::ContractPackage(payload) => BinarySerHelper::ContractPackage(payload), - StoredValue::Transfer(payload) => BinarySerHelper::Transfer(payload), - StoredValue::DeployInfo(payload) => BinarySerHelper::DeployInfo(payload), - StoredValue::EraInfo(payload) => BinarySerHelper::EraInfo(payload), - StoredValue::Bid(payload) => BinarySerHelper::Bid(payload), - StoredValue::Withdraw(payload) => BinarySerHelper::Withdraw(payload), - StoredValue::Unbonding(payload) => BinarySerHelper::Unbonding(payload), - StoredValue::AddressableEntity(payload) => { - BinarySerHelper::AddressableEntity(payload) - } - StoredValue::BidKind(payload) => BinarySerHelper::BidKind(payload), - StoredValue::Package(payload) => BinarySerHelper::Package(payload), - StoredValue::ByteCode(payload) => BinarySerHelper::ByteCode(payload), - StoredValue::MessageTopic(message_topic_summary) => { - BinarySerHelper::MessageTopic(message_topic_summary) - } - StoredValue::Message(message_digest) => BinarySerHelper::Message(message_digest), - } - } - } - - impl From for StoredValue { - fn from(helper: BinaryDeserHelper) -> Self { - match helper { - BinaryDeserHelper::CLValue(payload) => StoredValue::CLValue(payload), - BinaryDeserHelper::Account(payload) => StoredValue::Account(payload), - BinaryDeserHelper::ContractWasm(payload) => StoredValue::ContractWasm(payload), - BinaryDeserHelper::Contract(payload) => StoredValue::Contract(payload), - BinaryDeserHelper::ContractPackage(payload) => { - StoredValue::ContractPackage(payload) - } - BinaryDeserHelper::Transfer(payload) => StoredValue::Transfer(payload), - 
BinaryDeserHelper::DeployInfo(payload) => StoredValue::DeployInfo(payload), - BinaryDeserHelper::EraInfo(payload) => StoredValue::EraInfo(payload), - BinaryDeserHelper::Bid(bid) => StoredValue::Bid(bid), - BinaryDeserHelper::Withdraw(payload) => StoredValue::Withdraw(payload), - BinaryDeserHelper::Unbonding(payload) => StoredValue::Unbonding(payload), - BinaryDeserHelper::AddressableEntity(payload) => { - StoredValue::AddressableEntity(payload) - } - BinaryDeserHelper::BidKind(payload) => StoredValue::BidKind(payload), - BinaryDeserHelper::ByteCode(payload) => StoredValue::ByteCode(payload), - BinaryDeserHelper::Package(payload) => StoredValue::Package(payload), - BinaryDeserHelper::MessageTopic(message_topic_summary) => { - StoredValue::MessageTopic(message_topic_summary) - } - BinaryDeserHelper::Message(message_digest) => StoredValue::Message(message_digest), - } - } - } -} - -impl Serialize for StoredValue { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - serde_helpers::BinarySerHelper::from(self).serialize(serializer) - } else { - let bytes = self - .to_bytes() - .map_err(|error| ser::Error::custom(format!("{:?}", error)))?; - ByteBuf::from(bytes).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for StoredValue { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let json_helper = serde_helpers::BinaryDeserHelper::deserialize(deserializer)?; - Ok(StoredValue::from(json_helper)) - } else { - let bytes = ByteBuf::deserialize(deserializer)?.into_vec(); - bytesrepr::deserialize::(bytes) - .map_err(|error| de::Error::custom(format!("{:?}", error))) - } - } -} - -#[cfg(test)] -mod tests { - use proptest::proptest; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn serialization_roundtrip(v in gens::stored_value_arb()) { - bytesrepr::test_serialization_roundtrip(&v); - } - } -} diff --git a/casper_types_ver_2_0/src/stored_value/global_state_identifier.rs b/casper_types_ver_2_0/src/stored_value/global_state_identifier.rs deleted file mode 100644 index e99cf27a..00000000 --- a/casper_types_ver_2_0/src/stored_value/global_state_identifier.rs +++ /dev/null @@ -1,127 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(test)] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - BlockHash, BlockIdentifier, Digest, -}; - -const BLOCK_HASH_TAG: u8 = 0; -const BLOCK_HEIGHT_TAG: u8 = 1; -const STATE_ROOT_HASH_TAG: u8 = 2; - -/// Identifier for possible ways to query Global State -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum GlobalStateIdentifier { - /// Query using a block hash. - BlockHash(BlockHash), - /// Query using a block height. - BlockHeight(u64), - /// Query using the state root hash. 
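The `Serialize`/`Deserialize` impls above branch on `is_human_readable`, so JSON gets the externally tagged helper-enum form while binary formats get the bytesrepr encoding wrapped in a byte buffer. A sketch of the observable difference; `serde_json` and `bincode` are assumed to be available as elsewhere in this workspace.

use casper_types_ver_2_0::{CLValue, StoredValue};

fn serde_shapes() {
    let value = StoredValue::CLValue(CLValue::from_t(7u8).expect("encode u8"));

    // Human-readable path: externally tagged enum, e.g. {"CLValue":{...}}.
    let json = serde_json::to_string(&value).expect("to json");
    assert!(json.contains("CLValue"));

    // Binary path: bytesrepr bytes inside a serde byte buffer.
    let bin = bincode::serialize(&value).expect("to bincode");
    let back: StoredValue = bincode::deserialize(&bin).expect("from bincode");
    assert_eq!(value, back);
}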
- StateRootHash(Digest), -} - -impl GlobalStateIdentifier { - #[cfg(test)] - pub(crate) fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - 0 => Self::BlockHash(BlockHash::random(rng)), - 1 => Self::BlockHeight(rng.gen()), - 2 => Self::StateRootHash(Digest::random(rng)), - _ => panic!(), - } - } -} - -impl From for GlobalStateIdentifier { - fn from(block_identifier: BlockIdentifier) -> Self { - match block_identifier { - BlockIdentifier::Hash(block_hash) => GlobalStateIdentifier::BlockHash(block_hash), - BlockIdentifier::Height(block_height) => { - GlobalStateIdentifier::BlockHeight(block_height) - } - } - } -} - -impl FromBytes for GlobalStateIdentifier { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - match bytes.split_first() { - Some((&BLOCK_HASH_TAG, rem)) => { - let (block_hash, rem) = FromBytes::from_bytes(rem)?; - Ok((GlobalStateIdentifier::BlockHash(block_hash), rem)) - } - Some((&BLOCK_HEIGHT_TAG, rem)) => { - let (block_height, rem) = FromBytes::from_bytes(rem)?; - Ok((GlobalStateIdentifier::BlockHeight(block_height), rem)) - } - Some((&STATE_ROOT_HASH_TAG, rem)) => { - let (state_root_hash, rem) = FromBytes::from_bytes(rem)?; - Ok((GlobalStateIdentifier::StateRootHash(state_root_hash), rem)) - } - Some(_) | None => Err(bytesrepr::Error::Formatting), - } - } -} - -impl ToBytes for GlobalStateIdentifier { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - GlobalStateIdentifier::BlockHash(block_hash) => { - writer.push(BLOCK_HASH_TAG); - block_hash.write_bytes(writer)?; - } - GlobalStateIdentifier::BlockHeight(block_height) => { - writer.push(BLOCK_HEIGHT_TAG); - block_height.write_bytes(writer)?; - } - GlobalStateIdentifier::StateRootHash(state_root_hash) => { - writer.push(STATE_ROOT_HASH_TAG); - state_root_hash.write_bytes(writer)?; - } - } - Ok(()) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - GlobalStateIdentifier::BlockHash(block_hash) => block_hash.serialized_length(), - GlobalStateIdentifier::BlockHeight(block_height) => { - block_height.serialized_length() - } - GlobalStateIdentifier::StateRootHash(state_root_hash) => { - state_root_hash.serialized_length() - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = GlobalStateIdentifier::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/src/stored_value/type_mismatch.rs b/casper_types_ver_2_0/src/stored_value/type_mismatch.rs deleted file mode 100644 index d866f976..00000000 --- a/casper_types_ver_2_0/src/stored_value/type_mismatch.rs +++ /dev/null @@ -1,68 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -/// An error struct representing a type mismatch in [`StoredValue`](crate::StoredValue) operations. 
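A sketch of constructing the three `GlobalStateIdentifier` variants defined above and of the `From<BlockIdentifier>` conversion; the digests are hypothetical and the import paths assume the usual crate-root re-exports.

use casper_types_ver_2_0::{BlockHash, BlockIdentifier, Digest, GlobalStateIdentifier};

fn identifiers() -> Vec<GlobalStateIdentifier> {
    let by_state_root = GlobalStateIdentifier::StateRootHash(Digest::from([0u8; 32]));
    let by_height = GlobalStateIdentifier::BlockHeight(1_234_567);
    // Block identifiers map onto the first two variants; there is no
    // BlockIdentifier counterpart for a state root hash.
    let by_hash: GlobalStateIdentifier =
        BlockIdentifier::Hash(BlockHash::new(Digest::from([1u8; 32]))).into();

    // bytesrepr writes a one-byte tag (0 = hash, 1 = height, 2 = state root)
    // followed by the payload, per the impls above.
    vec![by_state_root, by_height, by_hash]
}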
-#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct TypeMismatch { - /// The name of the expected type. - expected: String, - /// The actual type found. - found: String, -} - -impl TypeMismatch { - /// Creates a new `TypeMismatch`. - pub fn new(expected: String, found: String) -> TypeMismatch { - TypeMismatch { expected, found } - } -} - -impl Display for TypeMismatch { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "Type mismatch. Expected {} but found {}.", - self.expected, self.found - ) - } -} - -impl ToBytes for TypeMismatch { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.expected.write_bytes(writer)?; - self.found.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.expected.serialized_length() + self.found.serialized_length() - } -} - -impl FromBytes for TypeMismatch { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (expected, remainder) = String::from_bytes(bytes)?; - let (found, remainder) = String::from_bytes(remainder)?; - Ok((TypeMismatch { expected, found }, remainder)) - } -} - -#[cfg(feature = "std")] -impl StdError for TypeMismatch {} diff --git a/casper_types_ver_2_0/src/system.rs b/casper_types_ver_2_0/src/system.rs deleted file mode 100644 index e742b4d3..00000000 --- a/casper_types_ver_2_0/src/system.rs +++ /dev/null @@ -1,12 +0,0 @@ -//! System modules, formerly known as "system contracts" -pub mod auction; -mod call_stack_element; -mod error; -pub mod handle_payment; -pub mod mint; -pub mod standard_payment; -mod system_contract_type; - -pub use call_stack_element::{CallStackElement, CallStackElementTag}; -pub use error::Error; -pub use system_contract_type::{SystemEntityType, AUCTION, HANDLE_PAYMENT, MINT, STANDARD_PAYMENT}; diff --git a/casper_types_ver_2_0/src/system/auction.rs b/casper_types_ver_2_0/src/system/auction.rs deleted file mode 100644 index 85bf7b4f..00000000 --- a/casper_types_ver_2_0/src/system/auction.rs +++ /dev/null @@ -1,279 +0,0 @@ -//! Contains implementation of a Auction contract functionality. 
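Looking back at the `TypeMismatch` error removed just above (before the `system` module), a sketch of how it reads when surfaced; the expected/found strings mirror its `Display` impl, and the import path is assumed.

use casper_types_ver_2_0::TypeMismatch;

fn type_mismatch_message() {
    let err = TypeMismatch::new("CLValue".to_string(), "Account".to_string());
    assert_eq!(
        err.to_string(),
        "Type mismatch. Expected CLValue but found Account."
    );
}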
-mod bid; -mod bid_addr; -mod bid_kind; -mod constants; -mod delegator; -mod entry_points; -mod era_info; -mod error; -mod seigniorage_recipient; -mod unbonding_purse; -mod validator_bid; -mod withdraw_purse; - -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use alloc::collections::btree_map::Entry; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use itertools::Itertools; - -use alloc::{boxed::Box, collections::BTreeMap, vec::Vec}; - -pub use bid::{Bid, VESTING_SCHEDULE_LENGTH_MILLIS}; -pub use bid_addr::{BidAddr, BidAddrTag}; -pub use bid_kind::{BidKind, BidKindTag}; -pub use constants::*; -pub use delegator::Delegator; -pub use entry_points::auction_entry_points; -pub use era_info::{EraInfo, SeigniorageAllocation}; -pub use error::Error; -pub use seigniorage_recipient::SeigniorageRecipient; -pub use unbonding_purse::UnbondingPurse; -pub use validator_bid::ValidatorBid; -pub use withdraw_purse::WithdrawPurse; - -#[cfg(any(feature = "testing", test))] -pub(crate) mod gens { - pub use super::era_info::gens::*; -} - -use crate::{account::AccountHash, EraId, PublicKey, U512}; - -/// Representation of delegation rate of tokens. Range from 0..=100. -pub type DelegationRate = u8; - -/// Validators mapped to their bids. -pub type ValidatorBids = BTreeMap>; - -/// Weights of validators. "Weight" in this context means a sum of their stakes. -pub type ValidatorWeights = BTreeMap; - -/// List of era validators -pub type EraValidators = BTreeMap; - -/// Collection of seigniorage recipients. -pub type SeigniorageRecipients = BTreeMap; - -/// Snapshot of `SeigniorageRecipients` for a given era. -pub type SeigniorageRecipientsSnapshot = BTreeMap; - -/// Validators and delegators mapped to their unbonding purses. -pub type UnbondingPurses = BTreeMap>; - -/// Validators and delegators mapped to their withdraw purses. -pub type WithdrawPurses = BTreeMap>; - -/// Aggregated representation of validator and associated delegator bids. -pub type Staking = BTreeMap)>; - -/// Utils for working with a vector of BidKind. -#[cfg(any(all(feature = "std", feature = "testing"), test))] -pub trait BidsExt { - /// Returns Bid matching public_key, if present. - fn unified_bid(&self, public_key: &PublicKey) -> Option; - - /// Returns ValidatorBid matching public_key, if present. - fn validator_bid(&self, public_key: &PublicKey) -> Option; - - /// Returns total validator stake, if present. - fn validator_total_stake(&self, public_key: &PublicKey) -> Option; - - /// Returns Delegator entries matching validator public key, if present. - fn delegators_by_validator_public_key(&self, public_key: &PublicKey) -> Option>; - - /// Returns Delegator entry by public keys, if present. - fn delegator_by_public_keys( - &self, - validator_public_key: &PublicKey, - delegator_public_key: &PublicKey, - ) -> Option; - - /// Returns true if containing any elements matching the provided validator public key. - fn contains_validator_public_key(&self, public_key: &PublicKey) -> bool; - - /// Removes any items with a public key matching the provided validator public key. - fn remove_by_validator_public_key(&mut self, public_key: &PublicKey); - - /// Creates a map of Validator public keys to associated Delegator public keys. - fn public_key_map(&self) -> BTreeMap>; - - /// Inserts if bid_kind does not exist, otherwise replaces. 
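Stepping back to the auction type aliases defined above (the `upsert` method whose doc comment precedes this continues below), a sketch of building a `ValidatorWeights` map; keys and amounts are hypothetical.

use std::collections::BTreeMap;

use casper_types_ver_2_0::{
    system::auction::ValidatorWeights,
    PublicKey, SecretKey, U512,
};

fn weights_example() -> ValidatorWeights {
    // "Weight" is the validator's summed stake; two hypothetical validators.
    let mut weights: ValidatorWeights = BTreeMap::new();
    for seed in [11u8, 12u8] {
        let key = PublicKey::from(
            &SecretKey::ed25519_from_bytes([seed; 32]).expect("secret key"),
        );
        weights.insert(key, U512::from(1_000u64 * u64::from(seed)));
    }
    weights
}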
- fn upsert(&mut self, bid_kind: BidKind); -} - -#[cfg(any(all(feature = "std", feature = "testing"), test))] -impl BidsExt for Vec { - fn unified_bid(&self, public_key: &PublicKey) -> Option { - if let BidKind::Unified(bid) = self - .iter() - .find(|x| x.is_validator() && &x.validator_public_key() == public_key)? - { - Some(*bid.clone()) - } else { - None - } - } - - fn validator_bid(&self, public_key: &PublicKey) -> Option { - if let BidKind::Validator(validator_bid) = self - .iter() - .find(|x| x.is_validator() && &x.validator_public_key() == public_key)? - { - Some(*validator_bid.clone()) - } else { - None - } - } - - fn validator_total_stake(&self, public_key: &PublicKey) -> Option { - if let Some(validator_bid) = self.validator_bid(public_key) { - let delegator_stake = { - match self.delegators_by_validator_public_key(validator_bid.validator_public_key()) - { - None => U512::zero(), - Some(delegators) => delegators.iter().map(|x| x.staked_amount()).sum(), - } - }; - return Some(validator_bid.staked_amount() + delegator_stake); - } - - if let BidKind::Unified(bid) = self - .iter() - .find(|x| x.is_validator() && &x.validator_public_key() == public_key)? - { - return Some(*bid.staked_amount()); - } - - None - } - - fn delegators_by_validator_public_key(&self, public_key: &PublicKey) -> Option> { - let mut ret = vec![]; - for delegator in self - .iter() - .filter(|x| x.is_delegator() && &x.validator_public_key() == public_key) - { - if let BidKind::Delegator(delegator) = delegator { - ret.push(*delegator.clone()); - } - } - - if ret.is_empty() { - None - } else { - Some(ret) - } - } - - fn delegator_by_public_keys( - &self, - validator_public_key: &PublicKey, - delegator_public_key: &PublicKey, - ) -> Option { - if let BidKind::Delegator(delegator) = self.iter().find(|x| { - &x.validator_public_key() == validator_public_key - && x.delegator_public_key() == Some(delegator_public_key.clone()) - })? 
{ - Some(*delegator.clone()) - } else { - None - } - } - - fn contains_validator_public_key(&self, public_key: &PublicKey) -> bool { - self.iter().any(|x| &x.validator_public_key() == public_key) - } - - fn remove_by_validator_public_key(&mut self, public_key: &PublicKey) { - self.retain(|x| &x.validator_public_key() != public_key) - } - - fn public_key_map(&self) -> BTreeMap> { - let mut ret = BTreeMap::new(); - let validators = self - .iter() - .filter(|x| x.is_validator()) - .cloned() - .collect_vec(); - for bid_kind in validators { - ret.insert(bid_kind.validator_public_key().clone(), vec![]); - } - let delegators = self - .iter() - .filter(|x| x.is_delegator()) - .cloned() - .collect_vec(); - for bid_kind in delegators { - if let BidKind::Delegator(delegator) = bid_kind { - match ret.entry(delegator.validator_public_key().clone()) { - Entry::Vacant(ve) => { - ve.insert(vec![delegator.delegator_public_key().clone()]); - } - Entry::Occupied(mut oe) => { - let delegators = oe.get_mut(); - delegators.push(delegator.delegator_public_key().clone()) - } - } - } - } - let unified = self - .iter() - .filter(|x| x.is_unified()) - .cloned() - .collect_vec(); - for bid_kind in unified { - if let BidKind::Unified(unified) = bid_kind { - let delegators = unified - .delegators() - .iter() - .map(|(_, y)| y.delegator_public_key().clone()) - .collect(); - ret.insert(unified.validator_public_key().clone(), delegators); - } - } - ret - } - - fn upsert(&mut self, bid_kind: BidKind) { - let maybe_index = match bid_kind { - BidKind::Unified(_) | BidKind::Validator(_) => self - .iter() - .find_position(|x| { - x.validator_public_key() == bid_kind.validator_public_key() - && x.tag() == bid_kind.tag() - }) - .map(|(idx, _)| idx), - BidKind::Delegator(_) => self - .iter() - .find_position(|x| { - x.is_delegator() - && x.validator_public_key() == bid_kind.validator_public_key() - && x.delegator_public_key() == bid_kind.delegator_public_key() - }) - .map(|(idx, _)| idx), - }; - - match maybe_index { - Some(index) => { - self.insert(index, bid_kind); - } - None => { - self.push(bid_kind); - } - } - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid(bid in gens::delegator_arb()) { - bytesrepr::test_serialization_roundtrip(&bid); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/bid.rs b/casper_types_ver_2_0/src/system/auction/bid.rs deleted file mode 100644 index 622d8a21..00000000 --- a/casper_types_ver_2_0/src/system/auction/bid.rs +++ /dev/null @@ -1,609 +0,0 @@ -mod vesting; - -use alloc::{collections::BTreeMap, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "json-schema")] -use serde_map_to_array::KeyValueJsonSchema; -use serde_map_to_array::{BTreeMapToArray, KeyValueLabels}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{DelegationRate, Delegator, Error, ValidatorBid}, - CLType, CLTyped, PublicKey, URef, U512, -}; - -pub use vesting::{VestingSchedule, VESTING_SCHEDULE_LENGTH_MILLIS}; - -/// An entry in the validator map. -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Bid { - /// Validator public key. 
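A sketch of using the `BidsExt` helpers implemented above on the flat `Vec<BidKind>` an auction-state query returns; the validator key is hypothetical, and note the trait is only compiled with the `std` and `testing` features (or in tests), as gated above.

use casper_types_ver_2_0::{
    system::auction::{BidKind, BidsExt},
    PublicKey, SecretKey, U512,
};

fn total_stake_example(bids: Vec<BidKind>) -> Option<U512> {
    // Hypothetical validator key; in practice this comes from chain data.
    let secret = SecretKey::ed25519_from_bytes([7u8; 32]).expect("secret key");
    let validator = PublicKey::from(&secret);

    // Own stake plus all delegator stakes, falling back to a legacy
    // unified bid when no split validator bid is present.
    bids.validator_total_stake(&validator)
}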
- validator_public_key: PublicKey, - /// The purse that was used for bonding. - bonding_purse: URef, - /// The amount of tokens staked by a validator (not including delegators). - staked_amount: U512, - /// Delegation rate. - delegation_rate: DelegationRate, - /// Vesting schedule for a genesis validator. `None` if non-genesis validator. - vesting_schedule: Option, - /// This validator's delegators, indexed by their public keys. - #[serde(with = "BTreeMapToArray::")] - delegators: BTreeMap, - /// `true` if validator has been "evicted". - inactive: bool, -} - -impl Bid { - #[allow(missing_docs)] - pub fn from_non_unified( - validator_bid: ValidatorBid, - delegators: BTreeMap, - ) -> Self { - Self { - validator_public_key: validator_bid.validator_public_key().clone(), - bonding_purse: *validator_bid.bonding_purse(), - staked_amount: validator_bid.staked_amount(), - delegation_rate: *validator_bid.delegation_rate(), - vesting_schedule: validator_bid.vesting_schedule().cloned(), - delegators, - inactive: validator_bid.inactive(), - } - } - - /// Creates new instance of a bid with locked funds. - pub fn locked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - release_timestamp_millis: u64, - ) -> Self { - let vesting_schedule = Some(VestingSchedule::new(release_timestamp_millis)); - let delegators = BTreeMap::new(); - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Creates new instance of a bid with unlocked funds. - pub fn unlocked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - ) -> Self { - let vesting_schedule = None; - let delegators = BTreeMap::new(); - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Creates a new inactive instance of a bid with 0 staked amount. - pub fn empty(validator_public_key: PublicKey, bonding_purse: URef) -> Self { - let vesting_schedule = None; - let delegators = BTreeMap::new(); - let inactive = true; - let staked_amount = 0.into(); - let delegation_rate = Default::default(); - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - } - } - - /// Gets the validator public key of the provided bid - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Gets the bonding purse of the provided bid - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked(&self, timestamp_millis: u64) -> bool { - self.is_locked_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. 
- pub fn is_locked_with_vesting_schedule( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - match &self.vesting_schedule { - Some(vesting_schedule) => { - vesting_schedule.is_vesting(timestamp_millis, vesting_schedule_period_millis) - } - None => false, - } - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount(&self) -> &U512 { - &self.staked_amount - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount_mut(&mut self) -> &mut U512 { - &mut self.staked_amount - } - - /// Gets the delegation rate of the provided bid - pub fn delegation_rate(&self) -> &DelegationRate { - &self.delegation_rate - } - - /// Returns a reference to the vesting schedule of the provided bid. `None` if a non-genesis - /// validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - self.vesting_schedule.as_ref() - } - - /// Returns a mutable reference to the vesting schedule of the provided bid. `None` if a - /// non-genesis validator. - pub fn vesting_schedule_mut(&mut self) -> Option<&mut VestingSchedule> { - self.vesting_schedule.as_mut() - } - - /// Returns a reference to the delegators of the provided bid - pub fn delegators(&self) -> &BTreeMap { - &self.delegators - } - - /// Returns a mutable reference to the delegators of the provided bid - pub fn delegators_mut(&mut self) -> &mut BTreeMap { - &mut self.delegators - } - - /// Returns `true` if validator is inactive - pub fn inactive(&self) -> bool { - self.inactive - } - - /// Decreases the stake of the provided bid - pub fn decrease_stake( - &mut self, - amount: U512, - era_end_timestamp_millis: u64, - ) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_sub(amount) - .ok_or(Error::UnbondTooLarge)?; - - let vesting_schedule = match self.vesting_schedule.as_ref() { - Some(vesting_schedule) => vesting_schedule, - None => { - self.staked_amount = updated_staked_amount; - return Ok(updated_staked_amount); - } - }; - - match vesting_schedule.locked_amount(era_end_timestamp_millis) { - Some(locked_amount) if updated_staked_amount < locked_amount => { - Err(Error::ValidatorFundsLocked) - } - None => { - // If `None`, then the locked amounts table has yet to be initialized (likely - // pre-90 day mark) - Err(Error::ValidatorFundsLocked) - } - Some(_) => { - self.staked_amount = updated_staked_amount; - Ok(updated_staked_amount) - } - } - } - - /// Increases the stake of the provided bid - pub fn increase_stake(&mut self, amount: U512) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_add(amount) - .ok_or(Error::InvalidAmount)?; - - self.staked_amount = updated_staked_amount; - - Ok(updated_staked_amount) - } - - /// Updates the delegation rate of the provided bid - pub fn with_delegation_rate(&mut self, delegation_rate: DelegationRate) -> &mut Self { - self.delegation_rate = delegation_rate; - self - } - - /// Initializes the vesting schedule of provided bid if the provided timestamp is greater than - /// or equal to the bid's initial release timestamp and the bid is owned by a genesis - /// validator. This method initializes with default 14 week vesting schedule. - /// - /// Returns `true` if the provided bid's vesting schedule was initialized. 
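A sketch of the stake-adjustment flow defined above: increases always apply, while decreases on a bid with a vesting schedule are refused with `Error::ValidatorFundsLocked` until funds unlock. Keys, purse and amounts are hypothetical; an `unlocked` bid has no schedule, so the decrease below succeeds.

use casper_types_ver_2_0::{
    system::auction::{Bid, Error},
    AccessRights, PublicKey, SecretKey, URef, U512,
};

fn adjust_stake() -> Result<(), Error> {
    let validator = PublicKey::from(
        &SecretKey::ed25519_from_bytes([1u8; 32]).expect("secret key"),
    );
    let purse = URef::new([2u8; 32], AccessRights::READ_ADD_WRITE);

    // Non-genesis bid: no vesting schedule, so decreases only check underflow.
    let mut bid = Bid::unlocked(validator, purse, U512::from(1_000u64), 10);
    bid.increase_stake(U512::from(500u64))?;
    let remaining = bid.decrease_stake(U512::from(200u64), 0)?;
    assert_eq!(remaining, U512::from(1_300u64));
    Ok(())
}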
- pub fn process(&mut self, timestamp_millis: u64) -> bool { - self.process_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Initializes the vesting schedule of provided bid if the provided timestamp is greater than - /// or equal to the bid's initial release timestamp and the bid is owned by a genesis - /// validator. - /// - /// Returns `true` if the provided bid's vesting schedule was initialized. - pub fn process_with_vesting_schedule( - &mut self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - // Put timestamp-sensitive processing logic in here - let staked_amount = self.staked_amount; - let vesting_schedule = match self.vesting_schedule_mut() { - Some(vesting_schedule) => vesting_schedule, - None => return false, - }; - if timestamp_millis < vesting_schedule.initial_release_timestamp_millis() { - return false; - } - - let mut initialized = false; - - if vesting_schedule.initialize_with_schedule(staked_amount, vesting_schedule_period_millis) - { - initialized = true; - } - - for delegator in self.delegators_mut().values_mut() { - let staked_amount = delegator.staked_amount(); - if let Some(vesting_schedule) = delegator.vesting_schedule_mut() { - if timestamp_millis >= vesting_schedule.initial_release_timestamp_millis() - && vesting_schedule - .initialize_with_schedule(staked_amount, vesting_schedule_period_millis) - { - initialized = true; - } - } - } - - initialized - } - - /// Sets given bid's `inactive` field to `false` - pub fn activate(&mut self) -> bool { - self.inactive = false; - false - } - - /// Sets given bid's `inactive` field to `true` - pub fn deactivate(&mut self) -> bool { - self.inactive = true; - true - } - - /// Returns the total staked amount of validator + all delegators - pub fn total_staked_amount(&self) -> Result { - self.delegators - .iter() - .try_fold(U512::zero(), |a, (_, b)| a.checked_add(b.staked_amount())) - .and_then(|delegators_sum| delegators_sum.checked_add(*self.staked_amount())) - .ok_or(Error::InvalidAmount) - } -} - -impl CLTyped for Bid { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for Bid { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.validator_public_key.serialized_length() - + self.bonding_purse.serialized_length() - + self.staked_amount.serialized_length() - + self.delegation_rate.serialized_length() - + self.vesting_schedule.serialized_length() - + self.delegators.serialized_length() - + self.inactive.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.validator_public_key.write_bytes(writer)?; - self.bonding_purse.write_bytes(writer)?; - self.staked_amount.write_bytes(writer)?; - self.delegation_rate.write_bytes(writer)?; - self.vesting_schedule.write_bytes(writer)?; - self.delegators.write_bytes(writer)?; - self.inactive.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Bid { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validator_public_key, bytes) = FromBytes::from_bytes(bytes)?; - let (bonding_purse, bytes) = FromBytes::from_bytes(bytes)?; - let (staked_amount, bytes) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, bytes) = FromBytes::from_bytes(bytes)?; - let (vesting_schedule, bytes) = FromBytes::from_bytes(bytes)?; - let (delegators, bytes) = FromBytes::from_bytes(bytes)?; - 
let (inactive, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - Bid { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - delegators, - inactive, - }, - bytes, - )) - } -} - -impl Display for Bid { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "bid {{ bonding purse {}, staked {}, delegation rate {}, delegators {{", - self.bonding_purse, self.staked_amount, self.delegation_rate - )?; - - let count = self.delegators.len(); - for (index, delegator) in self.delegators.values().enumerate() { - write!( - formatter, - "{}{}", - delegator, - if index + 1 == count { "" } else { ", " } - )?; - } - - write!( - formatter, - "}}, is {}inactive }}", - if self.inactive { "" } else { "not " } - ) - } -} - -struct DelegatorLabels; - -impl KeyValueLabels for DelegatorLabels { - const KEY: &'static str = "delegator_public_key"; - const VALUE: &'static str = "delegator"; -} - -#[cfg(feature = "json-schema")] -impl KeyValueJsonSchema for DelegatorLabels { - const JSON_SCHEMA_KV_NAME: Option<&'static str> = Some("PublicKeyAndDelegator"); - const JSON_SCHEMA_KV_DESCRIPTION: Option<&'static str> = - Some("A delegator associated with the given validator."); - const JSON_SCHEMA_KEY_DESCRIPTION: Option<&'static str> = - Some("The public key of the delegator."); - const JSON_SCHEMA_VALUE_DESCRIPTION: Option<&'static str> = Some("The delegator details."); -} - -#[cfg(test)] -mod tests { - use alloc::collections::BTreeMap; - - use crate::{ - bytesrepr, - system::auction::{bid::VestingSchedule, Bid, DelegationRate, Delegator}, - AccessRights, PublicKey, SecretKey, URef, U512, - }; - - const WEEK_MILLIS: u64 = 7 * 24 * 60 * 60 * 1000; - const TEST_VESTING_SCHEDULE_LENGTH_MILLIS: u64 = 7 * WEEK_MILLIS; - - #[test] - fn serialization_roundtrip() { - let founding_validator = Bid { - validator_public_key: PublicKey::from( - &SecretKey::ed25519_from_bytes([0u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - bonding_purse: URef::new([42; 32], AccessRights::READ_ADD_WRITE), - staked_amount: U512::one(), - delegation_rate: DelegationRate::max_value(), - vesting_schedule: Some(VestingSchedule::default()), - delegators: BTreeMap::default(), - inactive: true, - }; - bytesrepr::test_serialization_roundtrip(&founding_validator); - } - - #[test] - fn should_immediately_initialize_unlock_amounts() { - const TIMESTAMP_MILLIS: u64 = 0; - - let validator_pk: PublicKey = (&SecretKey::ed25519_from_bytes([42; 32]).unwrap()).into(); - - let validator_release_timestamp = TIMESTAMP_MILLIS; - let vesting_schedule_period_millis = TIMESTAMP_MILLIS; - let validator_bonding_purse = URef::new([42; 32], AccessRights::ADD); - let validator_staked_amount = U512::from(1000); - let validator_delegation_rate = 0; - - let mut bid = Bid::locked( - validator_pk, - validator_bonding_purse, - validator_staked_amount, - validator_delegation_rate, - validator_release_timestamp, - ); - - assert!(bid.process_with_vesting_schedule( - validator_release_timestamp, - vesting_schedule_period_millis, - )); - assert!(!bid.is_locked_with_vesting_schedule( - validator_release_timestamp, - vesting_schedule_period_millis - )); - } - - #[test] - fn should_initialize_delegators_different_timestamps() { - const TIMESTAMP_MILLIS: u64 = WEEK_MILLIS; - - let validator_pk: PublicKey = (&SecretKey::ed25519_from_bytes([42; 32]).unwrap()).into(); - - let delegator_1_pk: PublicKey = (&SecretKey::ed25519_from_bytes([43; 32]).unwrap()).into(); - let delegator_2_pk: PublicKey = 
(&SecretKey::ed25519_from_bytes([44; 32]).unwrap()).into(); - - let validator_release_timestamp = TIMESTAMP_MILLIS; - let validator_bonding_purse = URef::new([42; 32], AccessRights::ADD); - let validator_staked_amount = U512::from(1000); - let validator_delegation_rate = 0; - - let delegator_1_release_timestamp = TIMESTAMP_MILLIS + 1; - let delegator_1_bonding_purse = URef::new([52; 32], AccessRights::ADD); - let delegator_1_staked_amount = U512::from(2000); - - let delegator_2_release_timestamp = TIMESTAMP_MILLIS + 2; - let delegator_2_bonding_purse = URef::new([62; 32], AccessRights::ADD); - let delegator_2_staked_amount = U512::from(3000); - - let delegator_1 = Delegator::locked( - delegator_1_pk.clone(), - delegator_1_staked_amount, - delegator_1_bonding_purse, - validator_pk.clone(), - delegator_1_release_timestamp, - ); - - let delegator_2 = Delegator::locked( - delegator_2_pk.clone(), - delegator_2_staked_amount, - delegator_2_bonding_purse, - validator_pk.clone(), - delegator_2_release_timestamp, - ); - - let mut bid = Bid::locked( - validator_pk, - validator_bonding_purse, - validator_staked_amount, - validator_delegation_rate, - validator_release_timestamp, - ); - - assert!(!bid.process_with_vesting_schedule( - validator_release_timestamp - 1, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - { - let delegators = bid.delegators_mut(); - - delegators.insert(delegator_1_pk.clone(), delegator_1); - delegators.insert(delegator_2_pk.clone(), delegator_2); - } - - assert!(bid.process_with_vesting_schedule( - delegator_1_release_timestamp, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - let delegator_1_updated_1 = bid.delegators().get(&delegator_1_pk).cloned().unwrap(); - assert!(delegator_1_updated_1 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - - let delegator_2_updated_1 = bid.delegators().get(&delegator_2_pk).cloned().unwrap(); - assert!(delegator_2_updated_1 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_none()); - - assert!(bid.process_with_vesting_schedule( - delegator_2_release_timestamp, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - - let delegator_1_updated_2 = bid.delegators().get(&delegator_1_pk).cloned().unwrap(); - assert!(delegator_1_updated_2 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - // Delegator 1 is already initialized and did not change after 2nd Bid::process - assert_eq!(delegator_1_updated_1, delegator_1_updated_2); - - let delegator_2_updated_2 = bid.delegators().get(&delegator_2_pk).cloned().unwrap(); - assert!(delegator_2_updated_2 - .vesting_schedule() - .unwrap() - .locked_amounts() - .is_some()); - - // Delegator 2 is different compared to first Bid::process - assert_ne!(delegator_2_updated_1, delegator_2_updated_2); - - // Validator initialized, and all delegators initialized - assert!(!bid.process_with_vesting_schedule( - delegator_2_release_timestamp + 1, - TEST_VESTING_SCHEDULE_LENGTH_MILLIS - )); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! 
{ - #[test] - fn test_unified_bid(bid in gens::unified_bid_arb(0..3)) { - bytesrepr::test_serialization_roundtrip(&bid); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/bid/vesting.rs b/casper_types_ver_2_0/src/system/auction/bid/vesting.rs deleted file mode 100644 index ae496a4b..00000000 --- a/casper_types_ver_2_0/src/system/auction/bid/vesting.rs +++ /dev/null @@ -1,520 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes}, - U512, -}; - -const DAY_MILLIS: usize = 24 * 60 * 60 * 1000; -const DAYS_IN_WEEK: usize = 7; -const WEEK_MILLIS: usize = DAYS_IN_WEEK * DAY_MILLIS; - -/// Length of total vesting schedule in days. -const VESTING_SCHEDULE_LENGTH_DAYS: usize = 91; -/// Length of total vesting schedule expressed in days. -pub const VESTING_SCHEDULE_LENGTH_MILLIS: u64 = - VESTING_SCHEDULE_LENGTH_DAYS as u64 * DAY_MILLIS as u64; -/// 91 days / 7 days in a week = 13 weeks -const LOCKED_AMOUNTS_MAX_LENGTH: usize = (VESTING_SCHEDULE_LENGTH_DAYS / DAYS_IN_WEEK) + 1; - -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct VestingSchedule { - initial_release_timestamp_millis: u64, - locked_amounts: Option<[U512; LOCKED_AMOUNTS_MAX_LENGTH]>, -} - -fn vesting_schedule_period_to_weeks(vesting_schedule_period_millis: u64) -> usize { - debug_assert_ne!(DAY_MILLIS, 0); - debug_assert_ne!(DAYS_IN_WEEK, 0); - vesting_schedule_period_millis as usize / DAY_MILLIS / DAYS_IN_WEEK -} - -impl VestingSchedule { - pub fn new(initial_release_timestamp_millis: u64) -> Self { - let locked_amounts = None; - VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - } - } - - /// Initializes vesting schedule with a configured amount of weekly releases. - /// - /// Returns `false` if already initialized. - /// - /// # Panics - /// - /// Panics if `vesting_schedule_period_millis` represents more than 13 weeks. - pub fn initialize_with_schedule( - &mut self, - staked_amount: U512, - vesting_schedule_period_millis: u64, - ) -> bool { - if self.locked_amounts.is_some() { - return false; - } - - let locked_amounts_length = - vesting_schedule_period_to_weeks(vesting_schedule_period_millis); - - assert!( - locked_amounts_length < LOCKED_AMOUNTS_MAX_LENGTH, - "vesting schedule period must be less than {} weeks", - LOCKED_AMOUNTS_MAX_LENGTH, - ); - - if locked_amounts_length == 0 || vesting_schedule_period_millis == 0 { - // Zero weeks means instant unlock of staked amount. - self.locked_amounts = Some(Default::default()); - return true; - } - - let release_period: U512 = U512::from(locked_amounts_length + 1); - let weekly_release = staked_amount / release_period; - - let mut locked_amounts = [U512::zero(); LOCKED_AMOUNTS_MAX_LENGTH]; - let mut remaining_locked = staked_amount; - - for locked_amount in locked_amounts.iter_mut().take(locked_amounts_length) { - remaining_locked -= weekly_release; - *locked_amount = remaining_locked; - } - - assert_eq!( - locked_amounts.get(locked_amounts_length), - Some(&U512::zero()), - "first element after the schedule should be zero" - ); - - self.locked_amounts = Some(locked_amounts); - true - } - - /// Initializes weekly release for a fixed amount of 14 weeks period. 
- /// - /// Returns `false` if already initialized. - pub fn initialize(&mut self, staked_amount: U512) -> bool { - self.initialize_with_schedule(staked_amount, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - pub fn initial_release_timestamp_millis(&self) -> u64 { - self.initial_release_timestamp_millis - } - - pub fn locked_amounts(&self) -> Option<&[U512]> { - let locked_amounts = self.locked_amounts.as_ref()?; - Some(locked_amounts.as_slice()) - } - - pub fn locked_amount(&self, timestamp_millis: u64) -> Option { - let locked_amounts = self.locked_amounts()?; - - let index = { - let index_timestamp = - timestamp_millis.checked_sub(self.initial_release_timestamp_millis)?; - (index_timestamp as usize).checked_div(WEEK_MILLIS)? - }; - - let locked_amount = locked_amounts.get(index).cloned().unwrap_or_default(); - - Some(locked_amount) - } - - /// Checks if this vesting schedule is still under the vesting - pub(crate) fn is_vesting( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - let vested_period = match self.locked_amounts() { - Some(locked_amounts) => { - let vesting_weeks = locked_amounts - .iter() - .position(|amount| amount.is_zero()) - .expect("vesting schedule should always have zero at the end"); // SAFETY: at least one zero is guaranteed by `initialize_with_schedule` method - - let vesting_weeks_millis = - (vesting_weeks as u64).saturating_mul(WEEK_MILLIS as u64); - - self.initial_release_timestamp_millis() - .saturating_add(vesting_weeks_millis) - } - None => { - // Uninitialized yet but we know this will be the configured period of time. - self.initial_release_timestamp_millis() - .saturating_add(vesting_schedule_period_millis) - } - }; - - timestamp_millis < vested_period - } -} - -impl ToBytes for [U512; LOCKED_AMOUNTS_MAX_LENGTH] { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.iter().map(ToBytes::serialized_length).sum::() - } - - #[inline] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - for amount in self { - amount.write_bytes(writer)?; - } - Ok(()) - } -} - -impl FromBytes for [U512; LOCKED_AMOUNTS_MAX_LENGTH] { - fn from_bytes(mut bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let mut result = [U512::zero(); LOCKED_AMOUNTS_MAX_LENGTH]; - for value in &mut result { - let (amount, rem) = FromBytes::from_bytes(bytes)?; - *value = amount; - bytes = rem; - } - Ok((result, bytes)) - } -} - -impl ToBytes for VestingSchedule { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.initial_release_timestamp_millis.to_bytes()?); - result.append(&mut self.locked_amounts.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.initial_release_timestamp_millis.serialized_length() - + self.locked_amounts.serialized_length() - } -} - -impl FromBytes for VestingSchedule { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (initial_release_timestamp_millis, bytes) = FromBytes::from_bytes(bytes)?; - let (locked_amounts, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - }, - bytes, - )) - } -} - -/// Generators for [`VestingSchedule`] -#[cfg(test)] -mod gens { - use proptest::{ - array, option, - prelude::{Arbitrary, Strategy}, - }; - - use super::VestingSchedule; - use 
crate::gens::u512_arb; - - pub fn vesting_schedule_arb() -> impl Strategy { - (::arbitrary(), option::of(array::uniform14(u512_arb()))).prop_map( - |(initial_release_timestamp_millis, locked_amounts)| VestingSchedule { - initial_release_timestamp_millis, - locked_amounts, - }, - ) - } -} - -#[cfg(test)] -mod tests { - use proptest::{prop_assert, proptest}; - - use crate::{ - bytesrepr, - gens::u512_arb, - system::auction::bid::{ - vesting::{gens::vesting_schedule_arb, vesting_schedule_period_to_weeks, WEEK_MILLIS}, - VestingSchedule, - }, - U512, - }; - - use super::*; - - /// Default lock-in period of 90 days - const DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS: u64 = 90 * DAY_MILLIS as u64; - const RELEASE_TIMESTAMP: u64 = DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS; - const STAKE: u64 = 140; - - const DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS: u64 = 91 * DAY_MILLIS as u64; - const LOCKED_AMOUNTS_LENGTH: usize = - (DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS as usize / WEEK_MILLIS) + 1; - - #[test] - #[should_panic = "vesting schedule period must be less than"] - fn test_vesting_schedule_exceeding_the_maximum_should_not_panic() { - let future_date = 98 * DAY_MILLIS as u64; - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize_with_schedule(U512::from(STAKE), future_date); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_amount_check_should_not_panic() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize(U512::from(STAKE)); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_with_zero_length_schedule_should_not_panic() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize_with_schedule(U512::from(STAKE), 0); - - assert_eq!(vesting_schedule.locked_amount(0), None); - assert_eq!(vesting_schedule.locked_amount(RELEASE_TIMESTAMP - 1), None); - } - - #[test] - fn test_locked_amount() { - let mut vesting_schedule = VestingSchedule::new(RELEASE_TIMESTAMP); - vesting_schedule.initialize(U512::from(STAKE)); - - let mut timestamp = RELEASE_TIMESTAMP; - - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(130)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64 - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(130)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + WEEK_MILLIS as u64 + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(120)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 2) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(110)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3); - assert_eq!( - 
vesting_schedule.locked_amount(timestamp), - Some(U512::from(100)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 3) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(100)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(20)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 12) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 13) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(10)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 13); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 13) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14) - 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14); - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - - timestamp = RELEASE_TIMESTAMP + (WEEK_MILLIS as u64 * 14) + 1; - assert_eq!( - vesting_schedule.locked_amount(timestamp), - Some(U512::from(0)) - ); - } - - fn vested_amounts_match_initial_stake( - initial_stake: U512, - release_timestamp: u64, - vesting_schedule_length: u64, - ) -> bool { - let mut vesting_schedule = VestingSchedule::new(release_timestamp); - vesting_schedule.initialize_with_schedule(initial_stake, vesting_schedule_length); - - let mut total_vested_amounts = U512::zero(); - - for i in 0..LOCKED_AMOUNTS_LENGTH { - let timestamp = release_timestamp + (WEEK_MILLIS * i) as u64; - if let Some(locked_amount) = vesting_schedule.locked_amount(timestamp) { - let current_vested_amount = initial_stake - locked_amount - total_vested_amounts; - total_vested_amounts += current_vested_amount - } - } - - total_vested_amounts == initial_stake - } - - #[test] - fn vested_amounts_conserve_stake() { - let stake = U512::from(1000); - assert!(vested_amounts_match_initial_stake( - stake, - DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - )) - } - - #[test] - fn is_vesting_with_default_schedule() { - let initial_stake = U512::from(1000u64); - let release_timestamp = DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS; - let mut vesting_schedule = VestingSchedule::new(release_timestamp); - - let is_vesting_before: Vec = (0..LOCKED_AMOUNTS_LENGTH + 1) - .map(|i| { - vesting_schedule.is_vesting( - release_timestamp + (WEEK_MILLIS * i) as u64, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - ) - }) - .collect(); - - assert_eq!( - is_vesting_before, - vec![ - true, true, true, true, true, true, true, true, true, true, true, true, true, - false, // week after is always set to zero - false - ] - ); - vesting_schedule.initialize(initial_stake); - - let is_vesting_after: Vec = (0..LOCKED_AMOUNTS_LENGTH + 1) - .map(|i| { - vesting_schedule.is_vesting( - release_timestamp + (WEEK_MILLIS * i) as u64, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - ) - }) - .collect(); - - assert_eq!( - is_vesting_after, - vec![ - true, true, true, true, true, true, true, true, true, true, true, true, 
true, - false, // week after is always set to zero - false, - ] - ); - } - - #[test] - fn should_calculate_vesting_schedule_period_to_weeks() { - let thirteen_weeks_millis = 13 * 7 * DAY_MILLIS as u64; - assert_eq!(vesting_schedule_period_to_weeks(thirteen_weeks_millis), 13,); - - assert_eq!(vesting_schedule_period_to_weeks(0), 0); - assert_eq!( - vesting_schedule_period_to_weeks(u64::MAX), - 30_500_568_904usize - ); - } - - proptest! { - #[test] - fn prop_total_vested_amounts_conserve_stake(stake in u512_arb()) { - prop_assert!(vested_amounts_match_initial_stake( - stake, - DEFAULT_LOCKED_FUNDS_PERIOD_MILLIS, - DEFAULT_VESTING_SCHEDULE_PERIOD_MILLIS, - )) - } - - #[test] - fn prop_serialization_roundtrip(vesting_schedule in vesting_schedule_arb()) { - bytesrepr::test_serialization_roundtrip(&vesting_schedule) - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/bid_addr.rs b/casper_types_ver_2_0/src/system/auction/bid_addr.rs deleted file mode 100644 index 618b4994..00000000 --- a/casper_types_ver_2_0/src/system/auction/bid_addr.rs +++ /dev/null @@ -1,335 +0,0 @@ -use crate::{ - account::{AccountHash, ACCOUNT_HASH_LENGTH}, - bytesrepr, - bytesrepr::{FromBytes, ToBytes}, - system::auction::error::Error, - Key, KeyTag, PublicKey, -}; -use alloc::vec::Vec; -use core::fmt::{Debug, Display, Formatter}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -const UNIFIED_TAG: u8 = 0; -const VALIDATOR_TAG: u8 = 1; -const DELEGATOR_TAG: u8 = 2; - -/// Serialization tag for BidAddr variants. -#[derive( - Debug, Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize, -)] -#[repr(u8)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum BidAddrTag { - /// BidAddr for legacy unified bid. - Unified = UNIFIED_TAG, - /// BidAddr for validator bid. - #[default] - Validator = VALIDATOR_TAG, - /// BidAddr for delegator bid. - Delegator = DELEGATOR_TAG, -} - -impl Display for BidAddrTag { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - let tag = match self { - BidAddrTag::Unified => UNIFIED_TAG, - BidAddrTag::Validator => VALIDATOR_TAG, - BidAddrTag::Delegator => DELEGATOR_TAG, - }; - write!(f, "{}", base16::encode_lower(&[tag])) - } -} - -impl BidAddrTag { - /// The length in bytes of a [`BidAddrTag`]. - pub const BID_ADDR_TAG_LENGTH: usize = 1; - - /// Attempts to map `BidAddrTag` from a u8. - pub fn try_from_u8(value: u8) -> Option { - // TryFrom requires std, so doing this instead. - if value == UNIFIED_TAG { - return Some(BidAddrTag::Unified); - } - if value == VALIDATOR_TAG { - return Some(BidAddrTag::Validator); - } - if value == DELEGATOR_TAG { - return Some(BidAddrTag::Delegator); - } - - None - } -} - -/// Bid Address -#[derive(PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum BidAddr { - /// Unified BidAddr. - Unified(AccountHash), - /// Validator BidAddr. - Validator(AccountHash), - /// Delegator BidAddr. - Delegator { - /// The validator addr. - validator: AccountHash, - /// The delegator addr. - delegator: AccountHash, - }, -} - -impl BidAddr { - /// The length in bytes of a [`BidAddr`] for a validator bid. 
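For reference, the weekly-release arithmetic implemented by the `VestingSchedule::initialize_with_schedule` / `locked_amount` pair removed above in `casper_types_ver_2_0/src/system/auction/bid/vesting.rs` boils down to: the stake is divided by (number of locked weeks + 1), each week's table entry holds what is still locked after that week, and the entry following the schedule is always zero. Below is a minimal standalone sketch of that arithmetic only; it uses plain `u64` and a free function in place of the crate's `U512`-backed type, and the names `locked_amounts` / `main` are illustrative, not part of the crate.

```rust
const WEEK_MILLIS: u64 = 7 * 24 * 60 * 60 * 1000;

/// Splits `stake` into `weeks` locked tranches plus a trailing zero entry,
/// mirroring the deleted `initialize_with_schedule` logic.
fn locked_amounts(stake: u64, weeks: usize) -> Vec<u64> {
    let weekly_release = stake / (weeks as u64 + 1);
    let mut remaining = stake;
    let mut amounts = Vec::with_capacity(weeks + 1);
    for _ in 0..weeks {
        remaining -= weekly_release;
        amounts.push(remaining);
    }
    amounts.push(0); // the week after the schedule is always zero
    amounts
}

fn main() {
    // Matches the deleted `test_locked_amount` values: a stake of 140 over the
    // default 13-week schedule unlocks 10 per week: 130, 120, ..., 10, 0.
    let amounts = locked_amounts(140, 13);
    assert_eq!(amounts.first(), Some(&130));
    assert_eq!(amounts.last(), Some(&0));

    // `locked_amount(t)` indexes this table by whole weeks elapsed since the
    // initial release timestamp.
    let release_timestamp: u64 = 90 * 24 * 60 * 60 * 1000;
    let timestamp = release_timestamp + 3 * WEEK_MILLIS;
    let weeks_elapsed = ((timestamp - release_timestamp) / WEEK_MILLIS) as usize;
    assert_eq!(amounts[weeks_elapsed], 100);
}
```

The values asserted here are taken directly from the deleted vesting tests (stake of 140, default 91-day schedule).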
- pub const VALIDATOR_BID_ADDR_LENGTH: usize = - ACCOUNT_HASH_LENGTH + BidAddrTag::BID_ADDR_TAG_LENGTH; - - /// The length in bytes of a [`BidAddr`] for a delegator bid. - pub const DELEGATOR_BID_ADDR_LENGTH: usize = - (ACCOUNT_HASH_LENGTH * 2) + BidAddrTag::BID_ADDR_TAG_LENGTH; - - /// Constructs a new [`BidAddr`] instance from a validator's [`AccountHash`]. - pub const fn new_validator_addr(validator: [u8; ACCOUNT_HASH_LENGTH]) -> Self { - BidAddr::Validator(AccountHash::new(validator)) - } - - /// Constructs a new [`BidAddr`] instance from the [`AccountHash`] pair of a validator - /// and a delegator. - pub const fn new_delegator_addr( - pair: ([u8; ACCOUNT_HASH_LENGTH], [u8; ACCOUNT_HASH_LENGTH]), - ) -> Self { - BidAddr::Delegator { - validator: AccountHash::new(pair.0), - delegator: AccountHash::new(pair.1), - } - } - - #[allow(missing_docs)] - pub const fn legacy(validator: [u8; ACCOUNT_HASH_LENGTH]) -> Self { - BidAddr::Unified(AccountHash::new(validator)) - } - - /// Create a new instance of a [`BidAddr`]. - pub fn new_from_public_keys( - validator: &PublicKey, - maybe_delegator: Option<&PublicKey>, - ) -> Self { - if let Some(delegator) = maybe_delegator { - BidAddr::Delegator { - validator: AccountHash::from(validator), - delegator: AccountHash::from(delegator), - } - } else { - BidAddr::Validator(AccountHash::from(validator)) - } - } - - /// Returns the common prefix of all delegators to the cited validator. - pub fn delegators_prefix(&self) -> Result, Error> { - let validator = self.validator_account_hash(); - let mut ret = Vec::with_capacity(validator.serialized_length() + 2); - ret.push(KeyTag::BidAddr as u8); - ret.push(BidAddrTag::Delegator as u8); - validator.write_bytes(&mut ret)?; - Ok(ret) - } - - /// Validator account hash. - pub fn validator_account_hash(&self) -> AccountHash { - match self { - BidAddr::Unified(account_hash) | BidAddr::Validator(account_hash) => *account_hash, - BidAddr::Delegator { validator, .. } => *validator, - } - } - - /// Delegator account hash or none. - pub fn maybe_delegator_account_hash(&self) -> Option { - match self { - BidAddr::Unified(_) | BidAddr::Validator(_) => None, - BidAddr::Delegator { delegator, .. } => Some(*delegator), - } - } - - /// If true, this instance is the key for a delegator bid record. - /// Else, it is the key for a validator bid record. - pub fn is_delegator_bid_addr(&self) -> bool { - match self { - BidAddr::Unified(_) | BidAddr::Validator(_) => false, - BidAddr::Delegator { .. } => true, - } - } - - /// How long will be the serialized value for this instance. - pub fn serialized_length(&self) -> usize { - match self { - BidAddr::Unified(account_hash) | BidAddr::Validator(account_hash) => { - ToBytes::serialized_length(account_hash) + 1 - } - BidAddr::Delegator { - validator, - delegator, - } => ToBytes::serialized_length(validator) + ToBytes::serialized_length(delegator) + 1, - } - } - - /// Returns the BiddAddrTag of this instance. - pub fn tag(&self) -> BidAddrTag { - match self { - BidAddr::Unified(_) => BidAddrTag::Unified, - BidAddr::Validator(_) => BidAddrTag::Validator, - BidAddr::Delegator { .. 
} => BidAddrTag::Delegator, - } - } -} - -impl ToBytes for BidAddr { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.push(self.tag() as u8); - buffer.append(&mut self.validator_account_hash().to_bytes()?); - if let Some(delegator) = self.maybe_delegator_account_hash() { - buffer.append(&mut delegator.to_bytes()?); - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.serialized_length() - } -} - -impl FromBytes for BidAddr { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - match tag { - tag if tag == BidAddrTag::Unified as u8 => AccountHash::from_bytes(remainder) - .map(|(account_hash, remainder)| (BidAddr::Unified(account_hash), remainder)), - tag if tag == BidAddrTag::Validator as u8 => AccountHash::from_bytes(remainder) - .map(|(account_hash, remainder)| (BidAddr::Validator(account_hash), remainder)), - tag if tag == BidAddrTag::Delegator as u8 => { - let (validator, remainder) = AccountHash::from_bytes(remainder)?; - let (delegator, remainder) = AccountHash::from_bytes(remainder)?; - Ok(( - BidAddr::Delegator { - validator, - delegator, - }, - remainder, - )) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Default for BidAddr { - fn default() -> Self { - BidAddr::Validator(AccountHash::default()) - } -} - -impl From for Key { - fn from(bid_addr: BidAddr) -> Self { - Key::BidAddr(bid_addr) - } -} - -impl From for BidAddr { - fn from(account_hash: AccountHash) -> Self { - BidAddr::Validator(account_hash) - } -} - -impl From for BidAddr { - fn from(public_key: PublicKey) -> Self { - BidAddr::Validator(public_key.to_account_hash()) - } -} - -impl Display for BidAddr { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - let tag = self.tag(); - match self { - BidAddr::Unified(account_hash) | BidAddr::Validator(account_hash) => { - write!(f, "{}{}", tag, account_hash) - } - BidAddr::Delegator { - validator, - delegator, - } => write!(f, "{}{}{}", tag, validator, delegator), - } - } -} - -impl Debug for BidAddr { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - match self { - BidAddr::Unified(validator) => write!(f, "BidAddr::Unified({:?})", validator), - BidAddr::Validator(validator) => write!(f, "BidAddr::Validator({:?})", validator), - BidAddr::Delegator { - validator, - delegator, - } => { - write!(f, "BidAddr::Delegator({:?}{:?})", validator, delegator) - } - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> BidAddr { - BidAddr::Validator(AccountHash::new(rng.gen())) - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, system::auction::BidAddr}; - - #[test] - fn serialization_roundtrip() { - let bid_addr = BidAddr::legacy([1; 32]); - bytesrepr::test_serialization_roundtrip(&bid_addr); - let bid_addr = BidAddr::new_validator_addr([1; 32]); - bytesrepr::test_serialization_roundtrip(&bid_addr); - let bid_addr = BidAddr::new_delegator_addr(([1; 32], [2; 32])); - bytesrepr::test_serialization_roundtrip(&bid_addr); - } -} - -#[cfg(test)] -mod prop_test_validator_addr { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid_addr_validator(validator_bid_addr in gens::bid_addr_validator_arb()) { - bytesrepr::test_serialization_roundtrip(&validator_bid_addr); - } - } -} - -#[cfg(test)] -mod prop_test_delegator_addr { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! 
{ - #[test] - fn test_value_bid_addr_delegator(delegator_bid_addr in gens::bid_addr_delegator_arb()) { - bytesrepr::test_serialization_roundtrip(&delegator_bid_addr); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/bid_kind.rs b/casper_types_ver_2_0/src/system/auction/bid_kind.rs deleted file mode 100644 index 865f3ba9..00000000 --- a/casper_types_ver_2_0/src/system/auction/bid_kind.rs +++ /dev/null @@ -1,323 +0,0 @@ -use crate::{ - bytesrepr, - bytesrepr::{FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - system::auction::{bid::VestingSchedule, Bid, Delegator, ValidatorBid}, - PublicKey, URef, U512, -}; - -use crate::system::auction::BidAddr; -use alloc::{boxed::Box, vec::Vec}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -/// BidKindTag variants. -#[allow(clippy::large_enum_variant)] -#[repr(u8)] -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] -pub enum BidKindTag { - /// Unified bid. - Unified = 0, - /// Validator bid. - Validator = 1, - /// Delegator bid. - Delegator = 2, -} - -/// Auction bid variants. -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum BidKind { - /// A unified record indexed on validator data, with an embedded collection of all delegator - /// bids assigned to that validator. The Unified variant is for legacy retrograde support, new - /// instances will not be created going forward. - Unified(Box), - /// A bid record containing only validator data. - Validator(Box), - /// A bid record containing only delegator data. - Delegator(Box), -} - -impl BidKind { - /// Returns validator public key. - pub fn validator_public_key(&self) -> PublicKey { - match self { - BidKind::Unified(bid) => bid.validator_public_key().clone(), - BidKind::Validator(validator_bid) => validator_bid.validator_public_key().clone(), - BidKind::Delegator(delegator_bid) => delegator_bid.validator_public_key().clone(), - } - } - - /// Returns delegator public key, if any. - pub fn maybe_delegator_public_key(&self) -> Option { - match self { - BidKind::Unified(_) | BidKind::Validator(_) => None, - BidKind::Delegator(delegator_bid) => Some(delegator_bid.delegator_public_key().clone()), - } - } - - /// Returns BidAddr. - pub fn bid_addr(&self) -> BidAddr { - match self { - BidKind::Unified(bid) => BidAddr::Unified(bid.validator_public_key().to_account_hash()), - BidKind::Validator(validator_bid) => { - BidAddr::Validator(validator_bid.validator_public_key().to_account_hash()) - } - BidKind::Delegator(delegator_bid) => { - let validator = delegator_bid.validator_public_key().to_account_hash(); - let delegator = delegator_bid.delegator_public_key().to_account_hash(); - BidAddr::Delegator { - validator, - delegator, - } - } - } - } - - /// Is this instance a unified bid?. - pub fn is_unified(&self) -> bool { - match self { - BidKind::Unified(_) => true, - BidKind::Validator(_) | BidKind::Delegator(_) => false, - } - } - - /// Is this instance a validator bid?. - pub fn is_validator(&self) -> bool { - match self { - BidKind::Validator(_) => true, - BidKind::Unified(_) | BidKind::Delegator(_) => false, - } - } - - /// Is this instance a delegator bid?. - pub fn is_delegator(&self) -> bool { - match self { - BidKind::Delegator(_) => true, - BidKind::Unified(_) | BidKind::Validator(_) => false, - } - } - - /// The staked amount. 
- pub fn staked_amount(&self) -> U512 { - match self { - BidKind::Unified(bid) => *bid.staked_amount(), - BidKind::Validator(validator_bid) => validator_bid.staked_amount(), - BidKind::Delegator(delegator) => delegator.staked_amount(), - } - } - - /// The bonding purse. - pub fn bonding_purse(&self) -> URef { - match self { - BidKind::Unified(bid) => *bid.bonding_purse(), - BidKind::Validator(validator_bid) => *validator_bid.bonding_purse(), - BidKind::Delegator(delegator) => *delegator.bonding_purse(), - } - } - - /// The delegator public key, if relevant. - pub fn delegator_public_key(&self) -> Option { - match self { - BidKind::Unified(_) | BidKind::Validator(_) => None, - BidKind::Delegator(delegator) => Some(delegator.delegator_public_key().clone()), - } - } - - /// Is this bid inactive? - pub fn inactive(&self) -> bool { - match self { - BidKind::Unified(bid) => bid.inactive(), - BidKind::Validator(validator_bid) => validator_bid.inactive(), - BidKind::Delegator(delegator) => delegator.staked_amount().is_zero(), - } - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked(&self, timestamp_millis: u64) -> bool { - match self { - BidKind::Unified(bid) => bid.is_locked(timestamp_millis), - BidKind::Validator(validator_bid) => validator_bid.is_locked(timestamp_millis), - BidKind::Delegator(delegator) => delegator.is_locked(timestamp_millis), - } - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked_with_vesting_schedule( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - match self { - BidKind::Unified(bid) => bid - .is_locked_with_vesting_schedule(timestamp_millis, vesting_schedule_period_millis), - BidKind::Validator(validator_bid) => validator_bid - .is_locked_with_vesting_schedule(timestamp_millis, vesting_schedule_period_millis), - BidKind::Delegator(delegator) => delegator - .is_locked_with_vesting_schedule(timestamp_millis, vesting_schedule_period_millis), - } - } - - /// Returns a reference to the vesting schedule of the provided bid. `None` if a non-genesis - /// validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - match self { - BidKind::Unified(bid) => bid.vesting_schedule(), - BidKind::Validator(validator_bid) => validator_bid.vesting_schedule(), - BidKind::Delegator(delegator) => delegator.vesting_schedule(), - } - } - - /// BidKindTag. 
- pub fn tag(&self) -> BidKindTag { - match self { - BidKind::Unified(_) => BidKindTag::Unified, - BidKind::Validator(_) => BidKindTag::Validator, - BidKind::Delegator(_) => BidKindTag::Delegator, - } - } -} - -impl ToBytes for BidKind { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - let (tag, mut serialized_data) = match self { - BidKind::Unified(bid) => (BidKindTag::Unified, bid.to_bytes()?), - BidKind::Validator(validator_bid) => (BidKindTag::Validator, validator_bid.to_bytes()?), - BidKind::Delegator(delegator_bid) => (BidKindTag::Delegator, delegator_bid.to_bytes()?), - }; - result.push(tag as u8); - result.append(&mut serialized_data); - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - BidKind::Unified(bid) => bid.serialized_length(), - BidKind::Validator(validator_bid) => validator_bid.serialized_length(), - BidKind::Delegator(delegator_bid) => delegator_bid.serialized_length(), - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.tag() as u8); - match self { - //StoredValue::CLValue(cl_value) => cl_value.write_bytes(writer)?, - BidKind::Unified(bid) => bid.write_bytes(writer)?, - BidKind::Validator(validator_bid) => validator_bid.write_bytes(writer)?, - BidKind::Delegator(delegator_bid) => delegator_bid.write_bytes(writer)?, - }; - Ok(()) - } -} - -impl FromBytes for BidKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - match tag { - tag if tag == BidKindTag::Unified as u8 => Bid::from_bytes(remainder) - .map(|(bid, remainder)| (BidKind::Unified(Box::new(bid)), remainder)), - tag if tag == BidKindTag::Validator as u8 => { - ValidatorBid::from_bytes(remainder).map(|(validator_bid, remainder)| { - (BidKind::Validator(Box::new(validator_bid)), remainder) - }) - } - tag if tag == BidKindTag::Delegator as u8 => { - Delegator::from_bytes(remainder).map(|(delegator_bid, remainder)| { - (BidKind::Delegator(Box::new(delegator_bid)), remainder) - }) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::{BidKind, *}; - use crate::{bytesrepr, system::auction::DelegationRate, AccessRights, SecretKey}; - - #[test] - fn serialization_roundtrip() { - let validator_public_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([0u8; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let bonding_purse = URef::new([42; 32], AccessRights::READ_ADD_WRITE); - let bid = Bid::unlocked( - validator_public_key.clone(), - bonding_purse, - U512::one(), - DelegationRate::max_value(), - ); - let unified_bid = BidKind::Unified(Box::new(bid.clone())); - let validator_bid = ValidatorBid::from(bid.clone()); - - let delegator_public_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([1u8; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator = Delegator::unlocked( - delegator_public_key, - U512::one(), - bonding_purse, - validator_public_key, - ); - let delegator_bid = BidKind::Delegator(Box::new(delegator)); - - bytesrepr::test_serialization_roundtrip(&bid); - bytesrepr::test_serialization_roundtrip(&unified_bid); - bytesrepr::test_serialization_roundtrip(&validator_bid); - bytesrepr::test_serialization_roundtrip(&delegator_bid); - } -} - -#[cfg(test)] -mod prop_test_bid_kind_unified { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! 
{ - #[test] - fn test_value_bid_kind_unified(bid_kind in gens::unified_bid_arb(0..3)) { - bytesrepr::test_serialization_roundtrip(&bid_kind); - } - } -} - -#[cfg(test)] -mod prop_test_bid_kind_validator { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid_kind_validator(bid_kind in gens::validator_bid_arb()) { - bytesrepr::test_serialization_roundtrip(&bid_kind); - } - } -} - -#[cfg(test)] -mod prop_test_bid_kind_delegator { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid_kind_delegator(bid_kind in gens::delegator_bid_arb()) { - bytesrepr::test_serialization_roundtrip(&bid_kind); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/constants.rs b/casper_types_ver_2_0/src/system/auction/constants.rs deleted file mode 100644 index f3038f8e..00000000 --- a/casper_types_ver_2_0/src/system/auction/constants.rs +++ /dev/null @@ -1,98 +0,0 @@ -use crate::EraId; - -use super::DelegationRate; - -/// Initial value of era id we start at genesis. -pub const INITIAL_ERA_ID: EraId = EraId::new(0); - -/// Initial value of era end timestamp. -pub const INITIAL_ERA_END_TIMESTAMP_MILLIS: u64 = 0; - -/// Delegation rate is a fraction between 0-1. Validator sets the delegation rate -/// in integer terms, which is then divided by the denominator to obtain the fraction. -pub const DELEGATION_RATE_DENOMINATOR: DelegationRate = 100; - -/// We use one trillion as a block reward unit because it's large enough to allow precise -/// fractions, and small enough for many block rewards to fit into a u64. -pub const BLOCK_REWARD: u64 = 1_000_000_000_000; - -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; -/// Named constant for `delegation_rate`. -pub const ARG_DELEGATION_RATE: &str = "delegation_rate"; -/// Named constant for `account_hash`. -pub const ARG_PUBLIC_KEY: &str = "public_key"; -/// Named constant for `validator`. -pub const ARG_VALIDATOR: &str = "validator"; -/// Named constant for `delegator`. -pub const ARG_DELEGATOR: &str = "delegator"; -/// Named constant for `validator_purse`. -pub const ARG_VALIDATOR_PURSE: &str = "validator_purse"; -/// Named constant for `validator_keys`. -pub const ARG_VALIDATOR_KEYS: &str = "validator_keys"; -/// Named constant for `validator_public_keys`. -pub const ARG_VALIDATOR_PUBLIC_KEYS: &str = "validator_public_keys"; -/// Named constant for `new_validator`. -pub const ARG_NEW_VALIDATOR: &str = "new_validator"; -/// Named constant for `era_id`. -pub const ARG_ERA_ID: &str = "era_id"; -/// Named constant for `validator_public_key`. -pub const ARG_VALIDATOR_PUBLIC_KEY: &str = "validator_public_key"; -/// Named constant for `delegator_public_key`. -pub const ARG_DELEGATOR_PUBLIC_KEY: &str = "delegator_public_key"; -/// Named constant for `validator_slots` argument. 
-pub const ARG_VALIDATOR_SLOTS: &str = VALIDATOR_SLOTS_KEY; -/// Named constant for `mint_contract_package_hash` -pub const ARG_MINT_CONTRACT_PACKAGE_HASH: &str = "mint_contract_package_hash"; -/// Named constant for `genesis_validators` -pub const ARG_GENESIS_VALIDATORS: &str = "genesis_validators"; -/// Named constant of `auction_delay` -pub const ARG_AUCTION_DELAY: &str = "auction_delay"; -/// Named constant for `locked_funds_period` -pub const ARG_LOCKED_FUNDS_PERIOD: &str = "locked_funds_period"; -/// Named constant for `unbonding_delay` -pub const ARG_UNBONDING_DELAY: &str = "unbonding_delay"; -/// Named constant for `era_end_timestamp_millis`; -pub const ARG_ERA_END_TIMESTAMP_MILLIS: &str = "era_end_timestamp_millis"; -/// Named constant for `evicted_validators`; -pub const ARG_EVICTED_VALIDATORS: &str = "evicted_validators"; -/// Named constant for `rewards_map`; -pub const ARG_REWARDS_MAP: &str = "rewards_map"; - -/// Named constant for method `get_era_validators`. -pub const METHOD_GET_ERA_VALIDATORS: &str = "get_era_validators"; -/// Named constant for method `add_bid`. -pub const METHOD_ADD_BID: &str = "add_bid"; -/// Named constant for method `withdraw_bid`. -pub const METHOD_WITHDRAW_BID: &str = "withdraw_bid"; -/// Named constant for method `delegate`. -pub const METHOD_DELEGATE: &str = "delegate"; -/// Named constant for method `undelegate`. -pub const METHOD_UNDELEGATE: &str = "undelegate"; -/// Named constant for method `redelegate`. -pub const METHOD_REDELEGATE: &str = "redelegate"; -/// Named constant for method `run_auction`. -pub const METHOD_RUN_AUCTION: &str = "run_auction"; -/// Named constant for method `slash`. -pub const METHOD_SLASH: &str = "slash"; -/// Named constant for method `distribute`. -pub const METHOD_DISTRIBUTE: &str = "distribute"; -/// Named constant for method `read_era_id`. -pub const METHOD_READ_ERA_ID: &str = "read_era_id"; -/// Named constant for method `activate_bid`. -pub const METHOD_ACTIVATE_BID: &str = "activate_bid"; - -/// Storage for `EraId`. -pub const ERA_ID_KEY: &str = "era_id"; -/// Storage for era-end timestamp. -pub const ERA_END_TIMESTAMP_MILLIS_KEY: &str = "era_end_timestamp_millis"; -/// Storage for `SeigniorageRecipientsSnapshot`. -pub const SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY: &str = "seigniorage_recipients_snapshot"; -/// Total validator slots allowed. -pub const VALIDATOR_SLOTS_KEY: &str = "validator_slots"; -/// Amount of auction delay. -pub const AUCTION_DELAY_KEY: &str = "auction_delay"; -/// Default lock period for new bid entries represented in eras. -pub const LOCKED_FUNDS_PERIOD_KEY: &str = "locked_funds_period"; -/// Unbonding delay expressed in eras. 
-pub const UNBONDING_DELAY_KEY: &str = "unbonding_delay"; diff --git a/casper_types_ver_2_0/src/system/auction/delegator.rs b/casper_types_ver_2_0/src/system/auction/delegator.rs deleted file mode 100644 index ff672353..00000000 --- a/casper_types_ver_2_0/src/system/auction/delegator.rs +++ /dev/null @@ -1,309 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{bid::VestingSchedule, Error, VESTING_SCHEDULE_LENGTH_MILLIS}, - CLType, CLTyped, PublicKey, URef, U512, -}; - -/// Represents a party delegating their stake to a validator (or "delegatee") -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Delegator { - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - vesting_schedule: Option, -} - -impl Delegator { - /// Creates a new [`Delegator`] - pub fn unlocked( - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - ) -> Self { - let vesting_schedule = None; - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - } - } - - /// Creates new instance of a [`Delegator`] with locked funds. - pub fn locked( - delegator_public_key: PublicKey, - staked_amount: U512, - bonding_purse: URef, - validator_public_key: PublicKey, - release_timestamp_millis: u64, - ) -> Self { - let vesting_schedule = Some(VestingSchedule::new(release_timestamp_millis)); - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - } - } - - /// Returns public key of the delegator. - pub fn delegator_public_key(&self) -> &PublicKey { - &self.delegator_public_key - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked(&self, timestamp_millis: u64) -> bool { - self.is_locked_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. 
- pub fn is_locked_with_vesting_schedule( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - match &self.vesting_schedule { - Some(vesting_schedule) => { - vesting_schedule.is_vesting(timestamp_millis, vesting_schedule_period_millis) - } - None => false, - } - } - - /// Returns the staked amount - pub fn staked_amount(&self) -> U512 { - self.staked_amount - } - - /// Returns the mutable staked amount - pub fn staked_amount_mut(&mut self) -> &mut U512 { - &mut self.staked_amount - } - - /// Returns the bonding purse - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns delegatee - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Decreases the stake of the provided bid - pub fn decrease_stake( - &mut self, - amount: U512, - era_end_timestamp_millis: u64, - ) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_sub(amount) - .ok_or(Error::InvalidAmount)?; - - let vesting_schedule = match self.vesting_schedule.as_ref() { - Some(vesting_schedule) => vesting_schedule, - None => { - self.staked_amount = updated_staked_amount; - return Ok(updated_staked_amount); - } - }; - - match vesting_schedule.locked_amount(era_end_timestamp_millis) { - Some(locked_amount) if updated_staked_amount < locked_amount => { - Err(Error::DelegatorFundsLocked) - } - None => { - // If `None`, then the locked amounts table has yet to be initialized (likely - // pre-90 day mark) - Err(Error::DelegatorFundsLocked) - } - Some(_) => { - self.staked_amount = updated_staked_amount; - Ok(updated_staked_amount) - } - } - } - - /// Increases the stake of the provided bid - pub fn increase_stake(&mut self, amount: U512) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_add(amount) - .ok_or(Error::InvalidAmount)?; - - self.staked_amount = updated_staked_amount; - - Ok(updated_staked_amount) - } - - /// Returns a reference to the vesting schedule of the provided - /// delegator bid. `None` if a non-genesis validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - self.vesting_schedule.as_ref() - } - - /// Returns a mutable reference to the vesting schedule of the provided - /// delegator bid. `None` if a non-genesis validator. - pub fn vesting_schedule_mut(&mut self) -> Option<&mut VestingSchedule> { - self.vesting_schedule.as_mut() - } - - /// Creates a new inactive instance of a bid with 0 staked amount. 
- pub fn empty( - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - bonding_purse: URef, - ) -> Self { - let vesting_schedule = None; - let staked_amount = 0.into(); - Self { - validator_public_key, - delegator_public_key, - bonding_purse, - staked_amount, - vesting_schedule, - } - } -} - -impl CLTyped for Delegator { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for Delegator { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.delegator_public_key.to_bytes()?); - buffer.extend(self.staked_amount.to_bytes()?); - buffer.extend(self.bonding_purse.to_bytes()?); - buffer.extend(self.validator_public_key.to_bytes()?); - buffer.extend(self.vesting_schedule.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.delegator_public_key.serialized_length() - + self.staked_amount.serialized_length() - + self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.vesting_schedule.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.delegator_public_key.write_bytes(writer)?; - self.staked_amount.write_bytes(writer)?; - self.bonding_purse.write_bytes(writer)?; - self.validator_public_key.write_bytes(writer)?; - self.vesting_schedule.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for Delegator { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (delegator_public_key, bytes) = PublicKey::from_bytes(bytes)?; - let (staked_amount, bytes) = U512::from_bytes(bytes)?; - let (bonding_purse, bytes) = URef::from_bytes(bytes)?; - let (validator_public_key, bytes) = PublicKey::from_bytes(bytes)?; - let (vesting_schedule, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - Delegator { - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - vesting_schedule, - }, - bytes, - )) - } -} - -impl Display for Delegator { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "delegator {{ {} {} motes, bonding purse {}, validator {} }}", - self.delegator_public_key, - self.staked_amount, - self.bonding_purse, - self.validator_public_key - ) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - bytesrepr, system::auction::Delegator, AccessRights, PublicKey, SecretKey, URef, U512, - }; - - #[test] - fn serialization_roundtrip() { - let staked_amount = U512::one(); - let bonding_purse = URef::new([42; 32], AccessRights::READ_ADD_WRITE); - let delegator_public_key: PublicKey = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - - let validator_public_key: PublicKey = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let unlocked_delegator = Delegator::unlocked( - delegator_public_key.clone(), - staked_amount, - bonding_purse, - validator_public_key.clone(), - ); - bytesrepr::test_serialization_roundtrip(&unlocked_delegator); - - let release_timestamp_millis = 42; - let locked_delegator = Delegator::locked( - delegator_public_key, - staked_amount, - bonding_purse, - validator_public_key, - release_timestamp_millis, - ); - bytesrepr::test_serialization_roundtrip(&locked_delegator); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! 
{ - #[test] - fn test_value_bid(bid in gens::delegator_arb()) { - bytesrepr::test_serialization_roundtrip(&bid); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/entry_points.rs b/casper_types_ver_2_0/src/system/auction/entry_points.rs deleted file mode 100644 index 252550e5..00000000 --- a/casper_types_ver_2_0/src/system/auction/entry_points.rs +++ /dev/null @@ -1,142 +0,0 @@ -use crate::{ - system::auction::{ - DelegationRate, ValidatorWeights, ARG_AMOUNT, ARG_DELEGATION_RATE, ARG_DELEGATOR, - ARG_ERA_END_TIMESTAMP_MILLIS, ARG_NEW_VALIDATOR, ARG_PUBLIC_KEY, ARG_VALIDATOR, - ARG_VALIDATOR_PUBLIC_KEY, METHOD_ACTIVATE_BID, METHOD_ADD_BID, METHOD_DELEGATE, - METHOD_DISTRIBUTE, METHOD_GET_ERA_VALIDATORS, METHOD_READ_ERA_ID, METHOD_REDELEGATE, - METHOD_RUN_AUCTION, METHOD_SLASH, METHOD_UNDELEGATE, METHOD_WITHDRAW_BID, - }, - CLType, CLTyped, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, - PublicKey, U512, -}; - -use super::ARG_REWARDS_MAP; - -/// Creates auction contract entry points. -pub fn auction_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_GET_ERA_VALIDATORS, - vec![], - Option::::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_ADD_BID, - vec![ - Parameter::new(ARG_PUBLIC_KEY, PublicKey::cl_type()), - Parameter::new(ARG_DELEGATION_RATE, DelegationRate::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_WITHDRAW_BID, - vec![ - Parameter::new(ARG_PUBLIC_KEY, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_DELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_UNDELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_REDELEGATE, - vec![ - Parameter::new(ARG_DELEGATOR, PublicKey::cl_type()), - Parameter::new(ARG_VALIDATOR, PublicKey::cl_type()), - Parameter::new(ARG_AMOUNT, U512::cl_type()), - Parameter::new(ARG_NEW_VALIDATOR, PublicKey::cl_type()), - ], - U512::cl_type(), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_RUN_AUCTION, - vec![Parameter::new(ARG_ERA_END_TIMESTAMP_MILLIS, u64::cl_type())], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_SLASH, - vec![], - CLType::Unit, - EntryPointAccess::Public, - 
EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_DISTRIBUTE, - vec![Parameter::new( - ARG_REWARDS_MAP, - CLType::map(CLType::PublicKey, CLType::U512), - )], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_READ_ERA_ID, - vec![], - CLType::U64, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_ACTIVATE_BID, - vec![Parameter::new(ARG_VALIDATOR_PUBLIC_KEY, CLType::PublicKey)], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - entry_points -} diff --git a/casper_types_ver_2_0/src/system/auction/era_info.rs b/casper_types_ver_2_0/src/system/auction/era_info.rs deleted file mode 100644 index d9cb9e4b..00000000 --- a/casper_types_ver_2_0/src/system/auction/era_info.rs +++ /dev/null @@ -1,311 +0,0 @@ -use alloc::{boxed::Box, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, PublicKey, U512, -}; - -const SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG: u8 = 0; -const SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG: u8 = 1; - -/// Information about a seigniorage allocation -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum SeigniorageAllocation { - /// Info about a seigniorage allocation for a validator - Validator { - /// Validator's public key - validator_public_key: PublicKey, - /// Allocated amount - amount: U512, - }, - /// Info about a seigniorage allocation for a delegator - Delegator { - /// Delegator's public key - delegator_public_key: PublicKey, - /// Validator's public key - validator_public_key: PublicKey, - /// Allocated amount - amount: U512, - }, -} - -impl SeigniorageAllocation { - /// Constructs a [`SeigniorageAllocation::Validator`] - pub const fn validator(validator_public_key: PublicKey, amount: U512) -> Self { - SeigniorageAllocation::Validator { - validator_public_key, - amount, - } - } - - /// Constructs a [`SeigniorageAllocation::Delegator`] - pub const fn delegator( - delegator_public_key: PublicKey, - validator_public_key: PublicKey, - amount: U512, - ) -> Self { - SeigniorageAllocation::Delegator { - delegator_public_key, - validator_public_key, - amount, - } - } - - /// Returns the amount for a given seigniorage allocation - pub fn amount(&self) -> &U512 { - match self { - SeigniorageAllocation::Validator { amount, .. } => amount, - SeigniorageAllocation::Delegator { amount, .. } => amount, - } - } - - fn tag(&self) -> u8 { - match self { - SeigniorageAllocation::Validator { .. } => SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG, - SeigniorageAllocation::Delegator { .. 
} => SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG, - } - } -} - -impl ToBytes for SeigniorageAllocation { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.tag().serialized_length() - + match self { - SeigniorageAllocation::Validator { - validator_public_key, - amount, - } => validator_public_key.serialized_length() + amount.serialized_length(), - SeigniorageAllocation::Delegator { - delegator_public_key, - validator_public_key, - amount, - } => { - delegator_public_key.serialized_length() - + validator_public_key.serialized_length() - + amount.serialized_length() - } - } - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - writer.push(self.tag()); - match self { - SeigniorageAllocation::Validator { - validator_public_key, - amount, - } => { - validator_public_key.write_bytes(writer)?; - amount.write_bytes(writer)?; - } - SeigniorageAllocation::Delegator { - delegator_public_key, - validator_public_key, - amount, - } => { - delegator_public_key.write_bytes(writer)?; - validator_public_key.write_bytes(writer)?; - amount.write_bytes(writer)?; - } - } - Ok(()) - } -} - -impl FromBytes for SeigniorageAllocation { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, rem) = ::from_bytes(bytes)?; - match tag { - SEIGNIORAGE_ALLOCATION_VALIDATOR_TAG => { - let (validator_public_key, rem) = PublicKey::from_bytes(rem)?; - let (amount, rem) = U512::from_bytes(rem)?; - Ok(( - SeigniorageAllocation::validator(validator_public_key, amount), - rem, - )) - } - SEIGNIORAGE_ALLOCATION_DELEGATOR_TAG => { - let (delegator_public_key, rem) = PublicKey::from_bytes(rem)?; - let (validator_public_key, rem) = PublicKey::from_bytes(rem)?; - let (amount, rem) = U512::from_bytes(rem)?; - Ok(( - SeigniorageAllocation::delegator( - delegator_public_key, - validator_public_key, - amount, - ), - rem, - )) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl CLTyped for SeigniorageAllocation { - fn cl_type() -> CLType { - CLType::Any - } -} - -/// Auction metadata. Intended to be recorded at each era. -#[derive(Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct EraInfo { - seigniorage_allocations: Vec, -} - -impl EraInfo { - /// Constructs a [`EraInfo`]. - pub fn new() -> Self { - let seigniorage_allocations = Vec::new(); - EraInfo { - seigniorage_allocations, - } - } - - /// Returns a reference to the seigniorage allocations collection - pub fn seigniorage_allocations(&self) -> &Vec { - &self.seigniorage_allocations - } - - /// Returns a mutable reference to the seigniorage allocations collection - pub fn seigniorage_allocations_mut(&mut self) -> &mut Vec { - &mut self.seigniorage_allocations - } - - /// Returns all seigniorage allocations that match the provided public key - /// using the following criteria: - /// * If the match candidate is a validator allocation, the provided public key is matched - /// against the validator public key. - /// * If the match candidate is a delegator allocation, the provided public key is matched - /// against the delegator public key. 
-    pub fn select(&self, public_key: PublicKey) -> impl Iterator<Item = &SeigniorageAllocation> {
-        self.seigniorage_allocations
-            .iter()
-            .filter(move |allocation| match allocation {
-                SeigniorageAllocation::Validator {
-                    validator_public_key,
-                    ..
-                } => public_key == *validator_public_key,
-                SeigniorageAllocation::Delegator {
-                    delegator_public_key,
-                    ..
-                } => public_key == *delegator_public_key,
-            })
-    }
-}
-
-impl ToBytes for EraInfo {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut result = bytesrepr::allocate_buffer(self)?;
-        self.seigniorage_allocations().write_bytes(&mut result)?;
-        Ok(result)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.seigniorage_allocations.serialized_length()
-    }
-
-    fn write_bytes(&self, writer: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {
-        self.seigniorage_allocations().write_bytes(writer)?;
-        Ok(())
-    }
-}
-
-impl FromBytes for EraInfo {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (seigniorage_allocations, rem) = Vec::<SeigniorageAllocation>::from_bytes(bytes)?;
-        Ok((
-            EraInfo {
-                seigniorage_allocations,
-            },
-            rem,
-        ))
-    }
-}
-
-impl CLTyped for EraInfo {
-    fn cl_type() -> CLType {
-        CLType::List(Box::new(SeigniorageAllocation::cl_type()))
-    }
-}
-
-/// Generators for [`SeigniorageAllocation`] and [`EraInfo`]
-#[cfg(any(feature = "testing", feature = "gens", test))]
-pub mod gens {
-    use proptest::{
-        collection::{self, SizeRange},
-        prelude::Strategy,
-        prop_oneof,
-    };
-
-    use crate::{
-        crypto::gens::public_key_arb,
-        gens::u512_arb,
-        system::auction::{EraInfo, SeigniorageAllocation},
-    };
-
-    fn seigniorage_allocation_validator_arb() -> impl Strategy<Value = SeigniorageAllocation> {
-        (public_key_arb(), u512_arb()).prop_map(|(validator_public_key, amount)| {
-            SeigniorageAllocation::validator(validator_public_key, amount)
-        })
-    }
-
-    fn seigniorage_allocation_delegator_arb() -> impl Strategy<Value = SeigniorageAllocation> {
-        (public_key_arb(), public_key_arb(), u512_arb()).prop_map(
-            |(delegator_public_key, validator_public_key, amount)| {
-                SeigniorageAllocation::delegator(delegator_public_key, validator_public_key, amount)
-            },
-        )
-    }
-
-    /// Creates an arbitrary [`SeignorageAllocation`](crate::system::auction::SeigniorageAllocation)
-    pub fn seigniorage_allocation_arb() -> impl Strategy<Value = SeigniorageAllocation> {
-        prop_oneof![
-            seigniorage_allocation_validator_arb(),
-            seigniorage_allocation_delegator_arb()
-        ]
-    }
-
-    /// Creates an arbitrary [`EraInfo`]
-    pub fn era_info_arb(size: impl Into<SizeRange>) -> impl Strategy<Value = EraInfo> {
-        collection::vec(seigniorage_allocation_arb(), size).prop_map(|allocations| {
-            let mut era_info = EraInfo::new();
-            *era_info.seigniorage_allocations_mut() = allocations;
-            era_info
-        })
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use proptest::prelude::*;
-
-    use crate::bytesrepr;
-
-    use super::gens;
-
-    proptest! {
-        #[test]
-        fn test_serialization_roundtrip(era_info in gens::era_info_arb(0..32)) {
-            bytesrepr::test_serialization_roundtrip(&era_info)
-        }
-    }
-}
diff --git a/casper_types_ver_2_0/src/system/auction/error.rs b/casper_types_ver_2_0/src/system/auction/error.rs
deleted file mode 100644
index 0ddbb2f8..00000000
--- a/casper_types_ver_2_0/src/system/auction/error.rs
+++ /dev/null
@@ -1,545 +0,0 @@
-//! Home of the Auction contract's [`enum@Error`] type.
-use alloc::vec::Vec;
-use core::{
-    convert::{TryFrom, TryInto},
-    fmt::{self, Display, Formatter},
-    result,
-};
-
-use crate::{
-    bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH},
-    CLType, CLTyped,
-};
-
-/// Errors which can occur while executing the Auction contract.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(test, derive(strum::EnumIter))] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Unable to find named key in the contract's named keys. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(0, Error::MissingKey as u8); - /// ``` - MissingKey = 0, - /// Given named key contains invalid variant. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(1, Error::InvalidKeyVariant as u8); - /// ``` - InvalidKeyVariant = 1, - /// Value under an uref does not exist. This means the installer contract didn't work properly. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(2, Error::MissingValue as u8); - /// ``` - MissingValue = 2, - /// ABI serialization issue while reading or writing. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(3, Error::Serialization as u8); - /// ``` - Serialization = 3, - /// Triggered when contract was unable to transfer desired amount of tokens into a bid purse. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(4, Error::TransferToBidPurse as u8); - /// ``` - TransferToBidPurse = 4, - /// User passed invalid amount of tokens which might result in wrong values after calculation. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(5, Error::InvalidAmount as u8); - /// ``` - InvalidAmount = 5, - /// Unable to find a bid by account hash in `active_bids` map. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(6, Error::BidNotFound as u8); - /// ``` - BidNotFound = 6, - /// Validator's account hash was not found in the map. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(7, Error::ValidatorNotFound as u8); - /// ``` - ValidatorNotFound = 7, - /// Delegator's account hash was not found in the map. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(8, Error::DelegatorNotFound as u8); - /// ``` - DelegatorNotFound = 8, - /// Storage problem. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(9, Error::Storage as u8); - /// ``` - Storage = 9, - /// Raised when system is unable to bond. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(10, Error::Bonding as u8); - /// ``` - Bonding = 10, - /// Raised when system is unable to unbond. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(11, Error::Unbonding as u8); - /// ``` - Unbonding = 11, - /// Raised when Mint contract is unable to release founder stake. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(12, Error::ReleaseFounderStake as u8); - /// ``` - ReleaseFounderStake = 12, - /// Raised when the system is unable to determine purse balance. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(13, Error::GetBalance as u8); - /// ``` - GetBalance = 13, - /// Raised when an entry point is called from invalid account context. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(14, Error::InvalidContext as u8); - /// ``` - InvalidContext = 14, - /// Raised whenever a validator's funds are still locked in but an attempt to withdraw was - /// made. 
- /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(15, Error::ValidatorFundsLocked as u8); - /// ``` - ValidatorFundsLocked = 15, - /// Raised when caller is not the system account. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(16, Error::InvalidCaller as u8); - /// ``` - InvalidCaller = 16, - /// Raised when function is supplied a public key that does match the caller's or does not have - /// an associated account. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(17, Error::InvalidPublicKey as u8); - /// ``` - InvalidPublicKey = 17, - /// Validator is not not bonded. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(18, Error::BondNotFound as u8); - /// ``` - BondNotFound = 18, - /// Unable to create purse. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(19, Error::CreatePurseFailed as u8); - /// ``` - CreatePurseFailed = 19, - /// Attempted to unbond an amount which was too large. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(20, Error::UnbondTooLarge as u8); - /// ``` - UnbondTooLarge = 20, - /// Attempted to bond with a stake which was too small. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(21, Error::BondTooSmall as u8); - /// ``` - BondTooSmall = 21, - /// Raised when rewards are to be distributed to delegators, but the validator has no - /// delegations. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(22, Error::MissingDelegations as u8); - /// ``` - MissingDelegations = 22, - /// The validators returned by the consensus component should match - /// current era validators when distributing rewards. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(23, Error::MismatchedEraValidators as u8); - /// ``` - MismatchedEraValidators = 23, - /// Failed to mint reward tokens. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(24, Error::MintReward as u8); - /// ``` - MintReward = 24, - /// Invalid number of validator slots. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(25, Error::InvalidValidatorSlotsValue as u8); - /// ``` - InvalidValidatorSlotsValue = 25, - /// Failed to reduce total supply. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(26, Error::MintReduceTotalSupply as u8); - /// ``` - MintReduceTotalSupply = 26, - /// Triggered when contract was unable to transfer desired amount of tokens into a delegators - /// purse. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(27, Error::TransferToDelegatorPurse as u8); - /// ``` - TransferToDelegatorPurse = 27, - /// Triggered when contract was unable to perform a transfer to distribute validators reward. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(28, Error::ValidatorRewardTransfer as u8); - /// ``` - ValidatorRewardTransfer = 28, - /// Triggered when contract was unable to perform a transfer to distribute delegators rewards. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(29, Error::DelegatorRewardTransfer as u8); - /// ``` - DelegatorRewardTransfer = 29, - /// Failed to transfer desired amount while withdrawing delegators reward. 
- /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(30, Error::WithdrawDelegatorReward as u8); - /// ``` - WithdrawDelegatorReward = 30, - /// Failed to transfer desired amount while withdrawing validators reward. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(31, Error::WithdrawValidatorReward as u8); - /// ``` - WithdrawValidatorReward = 31, - /// Failed to transfer desired amount into unbonding purse. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(32, Error::TransferToUnbondingPurse as u8); - /// ``` - TransferToUnbondingPurse = 32, - /// Failed to record era info. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(33, Error::RecordEraInfo as u8); - /// ``` - RecordEraInfo = 33, - /// Failed to create a [`crate::CLValue`]. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(34, Error::CLValue as u8); - /// ``` - CLValue = 34, - /// Missing seigniorage recipients for given era. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(35, Error::MissingSeigniorageRecipients as u8); - /// ``` - MissingSeigniorageRecipients = 35, - /// Failed to transfer funds. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(36, Error::Transfer as u8); - /// ``` - Transfer = 36, - /// Delegation rate exceeds rate. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(37, Error::DelegationRateTooLarge as u8); - /// ``` - DelegationRateTooLarge = 37, - /// Raised whenever a delegator's funds are still locked in but an attempt to undelegate was - /// made. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(38, Error::DelegatorFundsLocked as u8); - /// ``` - DelegatorFundsLocked = 38, - /// An arithmetic overflow has occurred. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(39, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 39, - /// Execution exceeded the gas limit. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(40, Error::GasLimit as u8); - /// ``` - GasLimit = 40, - /// Too many frames on the runtime stack. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(41, Error::RuntimeStackOverflow as u8); - /// ``` - RuntimeStackOverflow = 41, - /// An error that is raised when there is an error in the mint contract that cannot - /// be mapped to a specific auction error. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(42, Error::MintError as u8); - /// ``` - MintError = 42, - /// The validator has exceeded the maximum amount of delegators allowed. - /// NOTE: This variant is no longer in use. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(43, Error::ExceededDelegatorSizeLimit as u8); - /// ``` - ExceededDelegatorSizeLimit = 43, - /// The global delegator capacity for the auction has been reached. - /// NOTE: This variant is no longer in use. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(44, Error::GlobalDelegatorCapacityReached as u8); - /// ``` - GlobalDelegatorCapacityReached = 44, - /// The delegated amount is below the minimum allowed. 
- /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(45, Error::DelegationAmountTooSmall as u8); - /// ``` - DelegationAmountTooSmall = 45, - /// Runtime stack error. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(46, Error::RuntimeStack as u8); - /// ``` - RuntimeStack = 46, - /// An error that is raised on private chain only when a `disable_auction_bids` flag is set to - /// `true`. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(47, Error::AuctionBidsDisabled as u8); - /// ``` - AuctionBidsDisabled = 47, - /// Error getting accumulation purse. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(48, Error::GetAccumulationPurse as u8); - /// ``` - GetAccumulationPurse = 48, - /// Failed to transfer desired amount into administrators account. - /// ``` - /// # use casper_types_ver_2_0::system::auction::Error; - /// assert_eq!(49, Error::TransferToAdministrator as u8); - /// ``` - TransferToAdministrator = 49, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::MissingKey => formatter.write_str("Missing key"), - Error::InvalidKeyVariant => formatter.write_str("Invalid key variant"), - Error::MissingValue => formatter.write_str("Missing value"), - Error::Serialization => formatter.write_str("Serialization error"), - Error::TransferToBidPurse => formatter.write_str("Transfer to bid purse error"), - Error::InvalidAmount => formatter.write_str("Invalid amount"), - Error::BidNotFound => formatter.write_str("Bid not found"), - Error::ValidatorNotFound => formatter.write_str("Validator not found"), - Error::DelegatorNotFound => formatter.write_str("Delegator not found"), - Error::Storage => formatter.write_str("Storage error"), - Error::Bonding => formatter.write_str("Bonding error"), - Error::Unbonding => formatter.write_str("Unbonding error"), - Error::ReleaseFounderStake => formatter.write_str("Unable to release founder stake"), - Error::GetBalance => formatter.write_str("Unable to get purse balance"), - Error::InvalidContext => formatter.write_str("Invalid context"), - Error::ValidatorFundsLocked => formatter.write_str("Validator's funds are locked"), - Error::InvalidCaller => formatter.write_str("Function must be called by system account"), - Error::InvalidPublicKey => formatter.write_str("Supplied public key does not match caller's public key or has no associated account"), - Error::BondNotFound => formatter.write_str("Validator's bond not found"), - Error::CreatePurseFailed => formatter.write_str("Unable to create purse"), - Error::UnbondTooLarge => formatter.write_str("Unbond is too large"), - Error::BondTooSmall => formatter.write_str("Bond is too small"), - Error::MissingDelegations => formatter.write_str("Validators has not received any delegations"), - Error::MismatchedEraValidators => formatter.write_str("Mismatched era validator sets to distribute rewards"), - Error::MintReward => formatter.write_str("Failed to mint rewards"), - Error::InvalidValidatorSlotsValue => formatter.write_str("Invalid number of validator slots"), - Error::MintReduceTotalSupply => formatter.write_str("Failed to reduce total supply"), - Error::TransferToDelegatorPurse => formatter.write_str("Transfer to delegators purse error"), - Error::ValidatorRewardTransfer => formatter.write_str("Reward transfer to validator error"), - Error::DelegatorRewardTransfer => formatter.write_str("Rewards transfer to delegator 
error"), - Error::WithdrawDelegatorReward => formatter.write_str("Withdraw delegator reward error"), - Error::WithdrawValidatorReward => formatter.write_str("Withdraw validator reward error"), - Error::TransferToUnbondingPurse => formatter.write_str("Transfer to unbonding purse error"), - Error::RecordEraInfo => formatter.write_str("Record era info error"), - Error::CLValue => formatter.write_str("CLValue error"), - Error::MissingSeigniorageRecipients => formatter.write_str("Missing seigniorage recipients for given era"), - Error::Transfer => formatter.write_str("Transfer error"), - Error::DelegationRateTooLarge => formatter.write_str("Delegation rate too large"), - Error::DelegatorFundsLocked => formatter.write_str("Delegator's funds are locked"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow"), - Error::GasLimit => formatter.write_str("Execution exceeded the gas limit"), - Error::RuntimeStackOverflow => formatter.write_str("Runtime stack overflow"), - Error::MintError => formatter.write_str("An error in the mint contract execution"), - Error::ExceededDelegatorSizeLimit => formatter.write_str("The amount of delegators per validator has been exceeded"), - Error::GlobalDelegatorCapacityReached => formatter.write_str("The global delegator capacity has been reached"), - Error::DelegationAmountTooSmall => formatter.write_str("The delegated amount is below the minimum allowed"), - Error::RuntimeStack => formatter.write_str("Runtime stack error"), - Error::AuctionBidsDisabled => formatter.write_str("Auction bids are disabled"), - Error::GetAccumulationPurse => formatter.write_str("Get accumulation purse error"), - Error::TransferToAdministrator => formatter.write_str("Transfer to administrator error"), - } - } -} - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -#[derive(Debug, PartialEq, Eq)] -pub struct TryFromU8ForError(()); - -// This conversion is not intended to be used by third party crates. 
-#[doc(hidden)] -impl TryFrom for Error { - type Error = TryFromU8ForError; - - fn try_from(value: u8) -> result::Result { - match value { - d if d == Error::MissingKey as u8 => Ok(Error::MissingKey), - d if d == Error::InvalidKeyVariant as u8 => Ok(Error::InvalidKeyVariant), - d if d == Error::MissingValue as u8 => Ok(Error::MissingValue), - d if d == Error::Serialization as u8 => Ok(Error::Serialization), - d if d == Error::TransferToBidPurse as u8 => Ok(Error::TransferToBidPurse), - d if d == Error::InvalidAmount as u8 => Ok(Error::InvalidAmount), - d if d == Error::BidNotFound as u8 => Ok(Error::BidNotFound), - d if d == Error::ValidatorNotFound as u8 => Ok(Error::ValidatorNotFound), - d if d == Error::DelegatorNotFound as u8 => Ok(Error::DelegatorNotFound), - d if d == Error::Storage as u8 => Ok(Error::Storage), - d if d == Error::Bonding as u8 => Ok(Error::Bonding), - d if d == Error::Unbonding as u8 => Ok(Error::Unbonding), - d if d == Error::ReleaseFounderStake as u8 => Ok(Error::ReleaseFounderStake), - d if d == Error::GetBalance as u8 => Ok(Error::GetBalance), - d if d == Error::InvalidContext as u8 => Ok(Error::InvalidContext), - d if d == Error::ValidatorFundsLocked as u8 => Ok(Error::ValidatorFundsLocked), - d if d == Error::InvalidCaller as u8 => Ok(Error::InvalidCaller), - d if d == Error::InvalidPublicKey as u8 => Ok(Error::InvalidPublicKey), - d if d == Error::BondNotFound as u8 => Ok(Error::BondNotFound), - d if d == Error::CreatePurseFailed as u8 => Ok(Error::CreatePurseFailed), - d if d == Error::UnbondTooLarge as u8 => Ok(Error::UnbondTooLarge), - d if d == Error::BondTooSmall as u8 => Ok(Error::BondTooSmall), - d if d == Error::MissingDelegations as u8 => Ok(Error::MissingDelegations), - d if d == Error::MismatchedEraValidators as u8 => Ok(Error::MismatchedEraValidators), - d if d == Error::MintReward as u8 => Ok(Error::MintReward), - d if d == Error::InvalidValidatorSlotsValue as u8 => { - Ok(Error::InvalidValidatorSlotsValue) - } - d if d == Error::MintReduceTotalSupply as u8 => Ok(Error::MintReduceTotalSupply), - d if d == Error::TransferToDelegatorPurse as u8 => Ok(Error::TransferToDelegatorPurse), - d if d == Error::ValidatorRewardTransfer as u8 => Ok(Error::ValidatorRewardTransfer), - d if d == Error::DelegatorRewardTransfer as u8 => Ok(Error::DelegatorRewardTransfer), - d if d == Error::WithdrawDelegatorReward as u8 => Ok(Error::WithdrawDelegatorReward), - d if d == Error::WithdrawValidatorReward as u8 => Ok(Error::WithdrawValidatorReward), - d if d == Error::TransferToUnbondingPurse as u8 => Ok(Error::TransferToUnbondingPurse), - - d if d == Error::RecordEraInfo as u8 => Ok(Error::RecordEraInfo), - d if d == Error::CLValue as u8 => Ok(Error::CLValue), - d if d == Error::MissingSeigniorageRecipients as u8 => { - Ok(Error::MissingSeigniorageRecipients) - } - d if d == Error::Transfer as u8 => Ok(Error::Transfer), - d if d == Error::DelegationRateTooLarge as u8 => Ok(Error::DelegationRateTooLarge), - d if d == Error::DelegatorFundsLocked as u8 => Ok(Error::DelegatorFundsLocked), - d if d == Error::ArithmeticOverflow as u8 => Ok(Error::ArithmeticOverflow), - d if d == Error::GasLimit as u8 => Ok(Error::GasLimit), - d if d == Error::RuntimeStackOverflow as u8 => Ok(Error::RuntimeStackOverflow), - d if d == Error::MintError as u8 => Ok(Error::MintError), - d if d == Error::ExceededDelegatorSizeLimit as u8 => { - Ok(Error::ExceededDelegatorSizeLimit) - } - d if d == Error::GlobalDelegatorCapacityReached as u8 => { - Ok(Error::GlobalDelegatorCapacityReached) - } - d 
if d == Error::DelegationAmountTooSmall as u8 => Ok(Error::DelegationAmountTooSmall), - d if d == Error::RuntimeStack as u8 => Ok(Error::RuntimeStack), - d if d == Error::AuctionBidsDisabled as u8 => Ok(Error::AuctionBidsDisabled), - d if d == Error::GetAccumulationPurse as u8 => Ok(Error::GetAccumulationPurse), - d if d == Error::TransferToAdministrator as u8 => Ok(Error::TransferToAdministrator), - _ => Err(TryFromU8ForError(())), - } - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> result::Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for Error { - fn from_bytes(bytes: &[u8]) -> result::Result<(Self, &[u8]), bytesrepr::Error> { - let (value, rem): (u8, _) = FromBytes::from_bytes(bytes)?; - let error: Error = value - .try_into() - // In case an Error variant is unable to be determined it would return an - // Error::Formatting as if its unable to be correctly deserialized. - .map_err(|_| bytesrepr::Error::Formatting)?; - Ok((error, rem)) - } -} - -impl From for Error { - fn from(_: bytesrepr::Error) -> Self { - Error::Serialization - } -} - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -pub enum PurseLookupError { - KeyNotFound, - KeyUnexpectedType, -} - -impl From for Error { - fn from(error: PurseLookupError) -> Self { - match error { - PurseLookupError::KeyNotFound => Error::MissingKey, - PurseLookupError::KeyUnexpectedType => Error::InvalidKeyVariant, - } - } -} - -#[cfg(test)] -mod tests { - use std::convert::TryFrom; - - use strum::IntoEnumIterator; - - use super::Error; - - #[test] - fn error_forward_trips() { - for expected_error_variant in Error::iter() { - assert_eq!( - Error::try_from(expected_error_variant as u8), - Ok(expected_error_variant) - ) - } - } - - #[test] - fn error_backward_trips() { - for u8 in 0..=u8::max_value() { - match Error::try_from(u8) { - Ok(error_variant) => { - assert_eq!(u8, error_variant as u8, "Error code mismatch") - } - Err(_) => continue, - }; - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/seigniorage_recipient.rs b/casper_types_ver_2_0/src/system/auction/seigniorage_recipient.rs deleted file mode 100644 index a82450f6..00000000 --- a/casper_types_ver_2_0/src/system/auction/seigniorage_recipient.rs +++ /dev/null @@ -1,196 +0,0 @@ -use alloc::{collections::BTreeMap, vec::Vec}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{Bid, DelegationRate}, - CLType, CLTyped, PublicKey, U512, -}; - -/// The seigniorage recipient details. -#[derive(Default, PartialEq, Eq, Clone, Debug)] -pub struct SeigniorageRecipient { - /// Validator stake (not including delegators) - stake: U512, - /// Delegation rate of a seigniorage recipient. - delegation_rate: DelegationRate, - /// Delegators and their bids. 
-    delegator_stake: BTreeMap<PublicKey, U512>,
-}
-
-impl SeigniorageRecipient {
-    /// Creates a new SeigniorageRecipient
-    pub fn new(
-        stake: U512,
-        delegation_rate: DelegationRate,
-        delegator_stake: BTreeMap<PublicKey, U512>,
-    ) -> Self {
-        Self {
-            stake,
-            delegation_rate,
-            delegator_stake,
-        }
-    }
-
-    /// Returns stake of the provided recipient
-    pub fn stake(&self) -> &U512 {
-        &self.stake
-    }
-
-    /// Returns delegation rate of the provided recipient
-    pub fn delegation_rate(&self) -> &DelegationRate {
-        &self.delegation_rate
-    }
-
-    /// Returns delegators of the provided recipient and their stake
-    pub fn delegator_stake(&self) -> &BTreeMap<PublicKey, U512> {
-        &self.delegator_stake
-    }
-
-    /// Calculates total stake, including delegators' total stake
-    pub fn total_stake(&self) -> Option<U512> {
-        self.delegator_total_stake()?.checked_add(self.stake)
-    }
-
-    /// Calculates total stake for all delegators
-    pub fn delegator_total_stake(&self) -> Option<U512> {
-        let mut total_stake: U512 = U512::zero();
-        for stake in self.delegator_stake.values() {
-            total_stake = total_stake.checked_add(*stake)?;
-        }
-        Some(total_stake)
-    }
-}
-
-impl CLTyped for SeigniorageRecipient {
-    fn cl_type() -> CLType {
-        CLType::Any
-    }
-}
-
-impl ToBytes for SeigniorageRecipient {
-    fn to_bytes(&self) -> Result<Vec<u8>, bytesrepr::Error> {
-        let mut result = bytesrepr::allocate_buffer(self)?;
-        result.extend(self.stake.to_bytes()?);
-        result.extend(self.delegation_rate.to_bytes()?);
-        result.extend(self.delegator_stake.to_bytes()?);
-        Ok(result)
-    }
-
-    fn serialized_length(&self) -> usize {
-        self.stake.serialized_length()
-            + self.delegation_rate.serialized_length()
-            + self.delegator_stake.serialized_length()
-    }
-}
-
-impl FromBytes for SeigniorageRecipient {
-    fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> {
-        let (stake, bytes) = FromBytes::from_bytes(bytes)?;
-        let (delegation_rate, bytes) = FromBytes::from_bytes(bytes)?;
-        let (delegator_stake, bytes) = FromBytes::from_bytes(bytes)?;
-        Ok((
-            SeigniorageRecipient {
-                stake,
-                delegation_rate,
-                delegator_stake,
-            },
-            bytes,
-        ))
-    }
-}
-
-impl From<&Bid> for SeigniorageRecipient {
-    fn from(bid: &Bid) -> Self {
-        let delegator_stake = bid
-            .delegators()
-            .iter()
-            .map(|(public_key, delegator)| (public_key.clone(), delegator.staked_amount()))
-            .collect();
-        Self {
-            stake: *bid.staked_amount(),
-            delegation_rate: *bid.delegation_rate(),
-            delegator_stake,
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use alloc::collections::BTreeMap;
-    use core::iter::FromIterator;
-
-    use crate::{
-        bytesrepr,
-        system::auction::{DelegationRate, SeigniorageRecipient},
-        PublicKey, SecretKey, U512,
-    };
-
-    #[test]
-    fn serialization_roundtrip() {
-        let delegator_1_key = PublicKey::from(
-            &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(),
-        );
-        let delegator_2_key = PublicKey::from(
-            &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(),
-        );
-        let delegator_3_key = PublicKey::from(
-            &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(),
-        );
-        let seigniorage_recipient = SeigniorageRecipient {
-            stake: U512::max_value(),
-            delegation_rate: DelegationRate::max_value(),
-            delegator_stake: BTreeMap::from_iter(vec![
-                (delegator_1_key, U512::max_value()),
-                (delegator_2_key, U512::max_value()),
-                (delegator_3_key, U512::zero()),
-            ]),
-        };
-        bytesrepr::test_serialization_roundtrip(&seigniorage_recipient);
-    }
-
-    #[test]
-    fn test_overflow_in_delegation_rate() {
-        let delegator_1_key = PublicKey::from(
-            &SecretKey::ed25519_from_bytes([42;
SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_2_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_3_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let seigniorage_recipient = SeigniorageRecipient { - stake: U512::max_value(), - delegation_rate: DelegationRate::max_value(), - delegator_stake: BTreeMap::from_iter(vec![ - (delegator_1_key, U512::max_value()), - (delegator_2_key, U512::max_value()), - (delegator_3_key, U512::zero()), - ]), - }; - assert_eq!(seigniorage_recipient.total_stake(), None) - } - - #[test] - fn test_overflow_in_delegation_total_stake() { - let delegator_1_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_2_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let delegator_3_key = PublicKey::from( - &SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(), - ); - let seigniorage_recipient = SeigniorageRecipient { - stake: U512::max_value(), - delegation_rate: DelegationRate::max_value(), - delegator_stake: BTreeMap::from_iter(vec![ - (delegator_1_key, U512::max_value()), - (delegator_2_key, U512::max_value()), - (delegator_3_key, U512::max_value()), - ]), - }; - assert_eq!(seigniorage_recipient.delegator_total_stake(), None) - } -} diff --git a/casper_types_ver_2_0/src/system/auction/unbonding_purse.rs b/casper_types_ver_2_0/src/system/auction/unbonding_purse.rs deleted file mode 100644 index 965376d2..00000000 --- a/casper_types_ver_2_0/src/system/auction/unbonding_purse.rs +++ /dev/null @@ -1,238 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, EraId, PublicKey, URef, U512, -}; - -use super::WithdrawPurse; - -/// Unbonding purse. -#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct UnbondingPurse { - /// Bonding Purse - bonding_purse: URef, - /// Validators public key. - validator_public_key: PublicKey, - /// Unbonders public key. - unbonder_public_key: PublicKey, - /// Era in which this unbonding request was created. - era_of_creation: EraId, - /// Unbonding Amount. - amount: U512, - /// The validator public key to re-delegate to. - new_validator: Option, -} - -impl UnbondingPurse { - /// Creates [`UnbondingPurse`] instance for an unbonding request. - pub const fn new( - bonding_purse: URef, - validator_public_key: PublicKey, - unbonder_public_key: PublicKey, - era_of_creation: EraId, - amount: U512, - new_validator: Option, - ) -> Self { - Self { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - new_validator, - } - } - - /// Checks if given request is made by a validator by checking if public key of unbonder is same - /// as a key owned by validator. - pub fn is_validator(&self) -> bool { - self.validator_public_key == self.unbonder_public_key - } - - /// Returns bonding purse used to make this unbonding request. - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns public key of validator. 
- pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Returns public key of unbonder. - /// - /// For withdrawal requests that originated from validator's public key through `withdraw_bid` - /// entrypoint this is equal to [`UnbondingPurse::validator_public_key`] and - /// [`UnbondingPurse::is_validator`] is `true`. - pub fn unbonder_public_key(&self) -> &PublicKey { - &self.unbonder_public_key - } - - /// Returns era which was used to create this unbonding request. - pub fn era_of_creation(&self) -> EraId { - self.era_of_creation - } - - /// Returns unbonding amount. - pub fn amount(&self) -> &U512 { - &self.amount - } - - /// Returns the public key for the new validator. - pub fn new_validator(&self) -> &Option { - &self.new_validator - } - - /// Sets amount to provided value. - pub fn with_amount(&mut self, amount: U512) { - self.amount = amount; - } -} - -impl ToBytes for UnbondingPurse { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.extend(&self.bonding_purse.to_bytes()?); - result.extend(&self.validator_public_key.to_bytes()?); - result.extend(&self.unbonder_public_key.to_bytes()?); - result.extend(&self.era_of_creation.to_bytes()?); - result.extend(&self.amount.to_bytes()?); - result.extend(&self.new_validator.to_bytes()?); - Ok(result) - } - fn serialized_length(&self) -> usize { - self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.unbonder_public_key.serialized_length() - + self.era_of_creation.serialized_length() - + self.amount.serialized_length() - + self.new_validator.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.bonding_purse.write_bytes(writer)?; - self.validator_public_key.write_bytes(writer)?; - self.unbonder_public_key.write_bytes(writer)?; - self.era_of_creation.write_bytes(writer)?; - self.amount.write_bytes(writer)?; - self.new_validator.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for UnbondingPurse { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonding_purse, remainder) = FromBytes::from_bytes(bytes)?; - let (validator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (unbonder_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (era_of_creation, remainder) = FromBytes::from_bytes(remainder)?; - let (amount, remainder) = FromBytes::from_bytes(remainder)?; - let (new_validator, remainder) = Option::::from_bytes(remainder)?; - - Ok(( - UnbondingPurse { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - new_validator, - }, - remainder, - )) - } -} - -impl CLTyped for UnbondingPurse { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl From for UnbondingPurse { - fn from(withdraw_purse: WithdrawPurse) -> Self { - UnbondingPurse::new( - withdraw_purse.bonding_purse, - withdraw_purse.validator_public_key, - withdraw_purse.unbonder_public_key, - withdraw_purse.era_of_creation, - withdraw_purse.amount, - None, - ) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - bytesrepr, system::auction::UnbondingPurse, AccessRights, EraId, PublicKey, SecretKey, - URef, U512, - }; - - const BONDING_PURSE: URef = URef::new([14; 32], AccessRights::READ_ADD_WRITE); - const ERA_OF_WITHDRAWAL: EraId = EraId::MAX; - - fn validator_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([42; SecretKey::ED25519_LENGTH]).unwrap(); - 
PublicKey::from(&secret_key) - } - - fn unbonder_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([43; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn amount() -> U512 { - U512::max_value() - 1 - } - - #[test] - fn serialization_roundtrip_for_unbonding_purse() { - let unbonding_purse = UnbondingPurse { - bonding_purse: BONDING_PURSE, - validator_public_key: validator_public_key(), - unbonder_public_key: unbonder_public_key(), - era_of_creation: ERA_OF_WITHDRAWAL, - amount: amount(), - new_validator: None, - }; - - bytesrepr::test_serialization_roundtrip(&unbonding_purse); - } - - #[test] - fn should_be_validator_condition_for_unbonding_purse() { - let validator_unbonding_purse = UnbondingPurse::new( - BONDING_PURSE, - validator_public_key(), - validator_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - None, - ); - assert!(validator_unbonding_purse.is_validator()); - } - - #[test] - fn should_be_delegator_condition_for_unbonding_purse() { - let delegator_unbonding_purse = UnbondingPurse::new( - BONDING_PURSE, - validator_public_key(), - unbonder_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - None, - ); - assert!(!delegator_unbonding_purse.is_validator()); - } -} diff --git a/casper_types_ver_2_0/src/system/auction/validator_bid.rs b/casper_types_ver_2_0/src/system/auction/validator_bid.rs deleted file mode 100644 index a90b725b..00000000 --- a/casper_types_ver_2_0/src/system/auction/validator_bid.rs +++ /dev/null @@ -1,380 +0,0 @@ -// TODO - remove once schemars stops causing warning. -#![allow(clippy::field_reassign_with_default)] - -use alloc::vec::Vec; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - system::auction::{ - bid::VestingSchedule, DelegationRate, Error, VESTING_SCHEDULE_LENGTH_MILLIS, - }, - CLType, CLTyped, PublicKey, URef, U512, -}; - -use crate::system::auction::Bid; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -/// An entry in the validator map. -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct ValidatorBid { - /// Validator public key - validator_public_key: PublicKey, - /// The purse that was used for bonding. - bonding_purse: URef, - /// The amount of tokens staked by a validator (not including delegators). - staked_amount: U512, - /// Delegation rate - delegation_rate: DelegationRate, - /// Vesting schedule for a genesis validator. `None` if non-genesis validator. - vesting_schedule: Option, - /// `true` if validator has been "evicted" - inactive: bool, -} - -impl ValidatorBid { - /// Creates new instance of a bid with locked funds. - pub fn locked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - release_timestamp_millis: u64, - ) -> Self { - let vesting_schedule = Some(VestingSchedule::new(release_timestamp_millis)); - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - inactive, - } - } - - /// Creates new instance of a bid with unlocked funds. 
- pub fn unlocked( - validator_public_key: PublicKey, - bonding_purse: URef, - staked_amount: U512, - delegation_rate: DelegationRate, - ) -> Self { - let vesting_schedule = None; - let inactive = false; - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - inactive, - } - } - - /// Creates a new inactive instance of a bid with 0 staked amount. - pub fn empty(validator_public_key: PublicKey, bonding_purse: URef) -> Self { - let vesting_schedule = None; - let inactive = true; - let staked_amount = 0.into(); - let delegation_rate = Default::default(); - Self { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - inactive, - } - } - - /// Gets the validator public key of the provided bid - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Gets the bonding purse of the provided bid - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked(&self, timestamp_millis: u64) -> bool { - self.is_locked_with_vesting_schedule(timestamp_millis, VESTING_SCHEDULE_LENGTH_MILLIS) - } - - /// Checks if a bid is still locked under a vesting schedule. - /// - /// Returns true if a timestamp falls below the initial lockup period + 91 days release - /// schedule, otherwise false. - pub fn is_locked_with_vesting_schedule( - &self, - timestamp_millis: u64, - vesting_schedule_period_millis: u64, - ) -> bool { - match &self.vesting_schedule { - Some(vesting_schedule) => { - vesting_schedule.is_vesting(timestamp_millis, vesting_schedule_period_millis) - } - None => false, - } - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount(&self) -> U512 { - self.staked_amount - } - - /// Gets the staked amount of the provided bid - pub fn staked_amount_mut(&mut self) -> &mut U512 { - &mut self.staked_amount - } - - /// Gets the delegation rate of the provided bid - pub fn delegation_rate(&self) -> &DelegationRate { - &self.delegation_rate - } - - /// Returns a reference to the vesting schedule of the provided bid. `None` if a non-genesis - /// validator. - pub fn vesting_schedule(&self) -> Option<&VestingSchedule> { - self.vesting_schedule.as_ref() - } - - /// Returns a mutable reference to the vesting schedule of the provided bid. `None` if a - /// non-genesis validator. 
- pub fn vesting_schedule_mut(&mut self) -> Option<&mut VestingSchedule> { - self.vesting_schedule.as_mut() - } - - /// Returns `true` if validator is inactive - pub fn inactive(&self) -> bool { - self.inactive - } - - /// Decreases the stake of the provided bid - pub fn decrease_stake( - &mut self, - amount: U512, - era_end_timestamp_millis: u64, - ) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_sub(amount) - .ok_or(Error::UnbondTooLarge)?; - - let vesting_schedule = match self.vesting_schedule.as_ref() { - Some(vesting_schedule) => vesting_schedule, - None => { - self.staked_amount = updated_staked_amount; - return Ok(updated_staked_amount); - } - }; - - match vesting_schedule.locked_amount(era_end_timestamp_millis) { - Some(locked_amount) if updated_staked_amount < locked_amount => { - Err(Error::ValidatorFundsLocked) - } - None => { - // If `None`, then the locked amounts table has yet to be initialized (likely - // pre-90 day mark) - Err(Error::ValidatorFundsLocked) - } - Some(_) => { - self.staked_amount = updated_staked_amount; - Ok(updated_staked_amount) - } - } - } - - /// Increases the stake of the provided bid - pub fn increase_stake(&mut self, amount: U512) -> Result { - let updated_staked_amount = self - .staked_amount - .checked_add(amount) - .ok_or(Error::InvalidAmount)?; - - self.staked_amount = updated_staked_amount; - - Ok(updated_staked_amount) - } - - /// Updates the delegation rate of the provided bid - pub fn with_delegation_rate(&mut self, delegation_rate: DelegationRate) -> &mut Self { - self.delegation_rate = delegation_rate; - self - } - - /// Sets given bid's `inactive` field to `false` - pub fn activate(&mut self) -> bool { - self.inactive = false; - false - } - - /// Sets given bid's `inactive` field to `true` - pub fn deactivate(&mut self) -> bool { - self.inactive = true; - true - } -} - -impl CLTyped for ValidatorBid { - fn cl_type() -> CLType { - CLType::Any - } -} - -impl ToBytes for ValidatorBid { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.validator_public_key.write_bytes(&mut result)?; - self.bonding_purse.write_bytes(&mut result)?; - self.staked_amount.write_bytes(&mut result)?; - self.delegation_rate.write_bytes(&mut result)?; - self.vesting_schedule.write_bytes(&mut result)?; - self.inactive.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.validator_public_key.serialized_length() - + self.bonding_purse.serialized_length() - + self.staked_amount.serialized_length() - + self.delegation_rate.serialized_length() - + self.vesting_schedule.serialized_length() - + self.inactive.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.validator_public_key.write_bytes(writer)?; - self.bonding_purse.write_bytes(writer)?; - self.staked_amount.write_bytes(writer)?; - self.delegation_rate.write_bytes(writer)?; - self.vesting_schedule.write_bytes(writer)?; - self.inactive.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for ValidatorBid { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (validator_public_key, bytes) = FromBytes::from_bytes(bytes)?; - let (bonding_purse, bytes) = FromBytes::from_bytes(bytes)?; - let (staked_amount, bytes) = FromBytes::from_bytes(bytes)?; - let (delegation_rate, bytes) = FromBytes::from_bytes(bytes)?; - let (vesting_schedule, bytes) = FromBytes::from_bytes(bytes)?; - let (inactive, bytes) = 
FromBytes::from_bytes(bytes)?; - Ok(( - ValidatorBid { - validator_public_key, - bonding_purse, - staked_amount, - delegation_rate, - vesting_schedule, - inactive, - }, - bytes, - )) - } -} - -impl From for ValidatorBid { - fn from(bid: Bid) -> Self { - ValidatorBid { - validator_public_key: bid.validator_public_key().clone(), - bonding_purse: *bid.bonding_purse(), - staked_amount: *bid.staked_amount(), - delegation_rate: *bid.delegation_rate(), - vesting_schedule: bid.vesting_schedule().cloned(), - inactive: bid.inactive(), - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ - bytesrepr, - system::auction::{bid::VestingSchedule, DelegationRate, ValidatorBid}, - AccessRights, PublicKey, SecretKey, URef, U512, - }; - - #[test] - fn serialization_roundtrip_active() { - let founding_validator = ValidatorBid { - validator_public_key: PublicKey::from( - &SecretKey::ed25519_from_bytes([0u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - bonding_purse: URef::new([42; 32], AccessRights::READ_ADD_WRITE), - staked_amount: U512::one(), - delegation_rate: DelegationRate::MAX, - vesting_schedule: Some(VestingSchedule::default()), - inactive: false, - }; - bytesrepr::test_serialization_roundtrip(&founding_validator); - } - - #[test] - fn serialization_roundtrip_inactive() { - let founding_validator = ValidatorBid { - validator_public_key: PublicKey::from( - &SecretKey::ed25519_from_bytes([0u8; SecretKey::ED25519_LENGTH]).unwrap(), - ), - bonding_purse: URef::new([42; 32], AccessRights::READ_ADD_WRITE), - staked_amount: U512::one(), - delegation_rate: DelegationRate::max_value(), - vesting_schedule: Some(VestingSchedule::default()), - inactive: true, - }; - bytesrepr::test_serialization_roundtrip(&founding_validator); - } - - #[test] - fn should_immediately_initialize_unlock_amounts() { - const TIMESTAMP_MILLIS: u64 = 0; - - let validator_pk: PublicKey = (&SecretKey::ed25519_from_bytes([42; 32]).unwrap()).into(); - - let validator_release_timestamp = TIMESTAMP_MILLIS; - let vesting_schedule_period_millis = TIMESTAMP_MILLIS; - let validator_bonding_purse = URef::new([42; 32], AccessRights::ADD); - let validator_staked_amount = U512::from(1000); - let validator_delegation_rate = 0; - - let bid = ValidatorBid::locked( - validator_pk, - validator_bonding_purse, - validator_staked_amount, - validator_delegation_rate, - validator_release_timestamp, - ); - - assert!(!bid.is_locked_with_vesting_schedule( - validator_release_timestamp, - vesting_schedule_period_millis - )); - } -} - -#[cfg(test)] -mod prop_tests { - use proptest::prelude::*; - - use crate::{bytesrepr, gens}; - - proptest! { - #[test] - fn test_value_bid(bid in gens::validator_bid_arb()) { - bytesrepr::test_serialization_roundtrip(&bid); - } - } -} diff --git a/casper_types_ver_2_0/src/system/auction/withdraw_purse.rs b/casper_types_ver_2_0/src/system/auction/withdraw_purse.rs deleted file mode 100644 index 9dc3806b..00000000 --- a/casper_types_ver_2_0/src/system/auction/withdraw_purse.rs +++ /dev/null @@ -1,192 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - CLType, CLTyped, EraId, PublicKey, URef, U512, -}; - -/// A withdraw purse, a legacy structure. 
-#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct WithdrawPurse { - /// Bonding Purse - pub(crate) bonding_purse: URef, - /// Validators public key. - pub(crate) validator_public_key: PublicKey, - /// Unbonders public key. - pub(crate) unbonder_public_key: PublicKey, - /// Era in which this unbonding request was created. - pub(crate) era_of_creation: EraId, - /// Unbonding Amount. - pub(crate) amount: U512, -} - -impl WithdrawPurse { - /// Creates [`WithdrawPurse`] instance for an unbonding request. - pub const fn new( - bonding_purse: URef, - validator_public_key: PublicKey, - unbonder_public_key: PublicKey, - era_of_creation: EraId, - amount: U512, - ) -> Self { - Self { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - } - } - - /// Checks if given request is made by a validator by checking if public key of unbonder is same - /// as a key owned by validator. - pub fn is_validator(&self) -> bool { - self.validator_public_key == self.unbonder_public_key - } - - /// Returns bonding purse used to make this unbonding request. - pub fn bonding_purse(&self) -> &URef { - &self.bonding_purse - } - - /// Returns public key of validator. - pub fn validator_public_key(&self) -> &PublicKey { - &self.validator_public_key - } - - /// Returns public key of unbonder. - /// - /// For withdrawal requests that originated from validator's public key through `withdraw_bid` - /// entrypoint this is equal to [`WithdrawPurse::validator_public_key`] and - /// [`WithdrawPurse::is_validator`] is `true`. - pub fn unbonder_public_key(&self) -> &PublicKey { - &self.unbonder_public_key - } - - /// Returns era which was used to create this unbonding request. - pub fn era_of_creation(&self) -> EraId { - self.era_of_creation - } - - /// Returns unbonding amount. 
- pub fn amount(&self) -> &U512 { - &self.amount - } -} - -impl ToBytes for WithdrawPurse { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.extend(&self.bonding_purse.to_bytes()?); - result.extend(&self.validator_public_key.to_bytes()?); - result.extend(&self.unbonder_public_key.to_bytes()?); - result.extend(&self.era_of_creation.to_bytes()?); - result.extend(&self.amount.to_bytes()?); - - Ok(result) - } - fn serialized_length(&self) -> usize { - self.bonding_purse.serialized_length() - + self.validator_public_key.serialized_length() - + self.unbonder_public_key.serialized_length() - + self.era_of_creation.serialized_length() - + self.amount.serialized_length() - } -} - -impl FromBytes for WithdrawPurse { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bonding_purse, remainder) = FromBytes::from_bytes(bytes)?; - let (validator_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (unbonder_public_key, remainder) = FromBytes::from_bytes(remainder)?; - let (era_of_creation, remainder) = FromBytes::from_bytes(remainder)?; - let (amount, remainder) = FromBytes::from_bytes(remainder)?; - - Ok(( - WithdrawPurse { - bonding_purse, - validator_public_key, - unbonder_public_key, - era_of_creation, - amount, - }, - remainder, - )) - } -} - -impl CLTyped for WithdrawPurse { - fn cl_type() -> CLType { - CLType::Any - } -} - -#[cfg(test)] -mod tests { - use crate::{bytesrepr, AccessRights, EraId, PublicKey, SecretKey, URef, U512}; - - use super::WithdrawPurse; - - const BONDING_PURSE: URef = URef::new([41; 32], AccessRights::READ_ADD_WRITE); - const ERA_OF_WITHDRAWAL: EraId = EraId::MAX; - - fn validator_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([44; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn unbonder_public_key() -> PublicKey { - let secret_key = SecretKey::ed25519_from_bytes([45; SecretKey::ED25519_LENGTH]).unwrap(); - PublicKey::from(&secret_key) - } - - fn amount() -> U512 { - U512::max_value() - 1 - } - - #[test] - fn serialization_roundtrip_for_withdraw_purse() { - let withdraw_purse = WithdrawPurse { - bonding_purse: BONDING_PURSE, - validator_public_key: validator_public_key(), - unbonder_public_key: unbonder_public_key(), - era_of_creation: ERA_OF_WITHDRAWAL, - amount: amount(), - }; - - bytesrepr::test_serialization_roundtrip(&withdraw_purse); - } - - #[test] - fn should_be_validator_condition_for_withdraw_purse() { - let validator_withdraw_purse = WithdrawPurse::new( - BONDING_PURSE, - validator_public_key(), - validator_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - ); - assert!(validator_withdraw_purse.is_validator()); - } - - #[test] - fn should_be_delegator_condition_for_withdraw_purse() { - let delegator_withdraw_purse = WithdrawPurse::new( - BONDING_PURSE, - validator_public_key(), - unbonder_public_key(), - ERA_OF_WITHDRAWAL, - amount(), - ); - assert!(!delegator_withdraw_purse.is_validator()); - } -} diff --git a/casper_types_ver_2_0/src/system/call_stack_element.rs b/casper_types_ver_2_0/src/system/call_stack_element.rs deleted file mode 100644 index df09eac3..00000000 --- a/casper_types_ver_2_0/src/system/call_stack_element.rs +++ /dev/null @@ -1,164 +0,0 @@ -use alloc::vec::Vec; - -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::FromPrimitive; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - package::PackageHash, - 
AddressableEntityHash, CLType, CLTyped, -}; - -/// Tag representing variants of CallStackElement for purposes of serialization. -#[derive(FromPrimitive, ToPrimitive)] -#[repr(u8)] -pub enum CallStackElementTag { - /// Session tag. - Session = 0, - /// StoredContract tag. - StoredContract, -} - -/// Represents the origin of a sub-call. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum CallStackElement { - /// Session - Session { - /// The account hash of the caller - account_hash: AccountHash, - }, - // /// Effectively an EntryPointType::Session - stored access to a session. - // StoredSession { - // /// The account hash of the caller - // account_hash: AccountHash, - // /// The package hash - // package_hash: PackageHash, - // /// The contract hash - // contract_hash: AddressableEntityHash, - // }, - /// AddressableEntity - AddressableEntity { - /// The package hash - package_hash: PackageHash, - /// The entity hash - entity_hash: AddressableEntityHash, - }, -} - -impl CallStackElement { - /// Creates a [`CallStackElement::Session`]. This represents a call into session code, and - /// should only ever happen once in a call stack. - pub fn session(account_hash: AccountHash) -> Self { - CallStackElement::Session { account_hash } - } - - /// Creates a [`'CallStackElement::StoredContract`]. This represents a call into a contract with - /// `EntryPointType::Contract`. - pub fn stored_contract( - package_hash: PackageHash, - contract_hash: AddressableEntityHash, - ) -> Self { - CallStackElement::AddressableEntity { - package_hash, - entity_hash: contract_hash, - } - } - - // /// Creates a [`'CallStackElement::StoredSession`]. This represents a call into a contract - // with /// `EntryPointType::Session`. - // pub fn stored_session( - // account_hash: AccountHash, - // package_hash: PackageHash, - // contract_hash: AddressableEntityHash, - // ) -> Self { - // CallStackElement::StoredSession { - // account_hash, - // package_hash, - // contract_hash, - // } - // } - - /// Gets the tag from self. - pub fn tag(&self) -> CallStackElementTag { - match self { - CallStackElement::Session { .. } => CallStackElementTag::Session, - - CallStackElement::AddressableEntity { .. } => CallStackElementTag::StoredContract, - } - } - - /// Gets the [`AddressableEntityHash`] for both stored session and stored contract variants. - pub fn contract_hash(&self) -> Option<&AddressableEntityHash> { - match self { - CallStackElement::Session { .. } => None, - - CallStackElement::AddressableEntity { - entity_hash: contract_hash, - .. - } => Some(contract_hash), - } - } -} - -impl ToBytes for CallStackElement { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.push(self.tag() as u8); - match self { - CallStackElement::Session { account_hash } => { - result.append(&mut account_hash.to_bytes()?) 
- } - - CallStackElement::AddressableEntity { - package_hash, - entity_hash: contract_hash, - } => { - result.append(&mut package_hash.to_bytes()?); - result.append(&mut contract_hash.to_bytes()?); - } - }; - Ok(result) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - CallStackElement::Session { account_hash } => account_hash.serialized_length(), - CallStackElement::AddressableEntity { - package_hash, - entity_hash: contract_hash, - } => package_hash.serialized_length() + contract_hash.serialized_length(), - } - } -} - -impl FromBytes for CallStackElement { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - let tag = CallStackElementTag::from_u8(tag).ok_or(bytesrepr::Error::Formatting)?; - match tag { - CallStackElementTag::Session => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((CallStackElement::Session { account_hash }, remainder)) - } - CallStackElementTag::StoredContract => { - let (package_hash, remainder) = PackageHash::from_bytes(remainder)?; - let (contract_hash, remainder) = AddressableEntityHash::from_bytes(remainder)?; - Ok(( - CallStackElement::AddressableEntity { - package_hash, - entity_hash: contract_hash, - }, - remainder, - )) - } - } - } -} - -impl CLTyped for CallStackElement { - fn cl_type() -> CLType { - CLType::Any - } -} diff --git a/casper_types_ver_2_0/src/system/error.rs b/casper_types_ver_2_0/src/system/error.rs deleted file mode 100644 index c63e3f58..00000000 --- a/casper_types_ver_2_0/src/system/error.rs +++ /dev/null @@ -1,43 +0,0 @@ -use core::fmt::{self, Display, Formatter}; - -use crate::system::{auction, handle_payment, mint}; - -/// An aggregate enum error with variants for each system contract's error. -#[derive(Debug, Copy, Clone)] -#[non_exhaustive] -pub enum Error { - /// Contains a [`mint::Error`]. - Mint(mint::Error), - /// Contains a [`handle_payment::Error`]. - HandlePayment(handle_payment::Error), - /// Contains a [`auction::Error`]. - Auction(auction::Error), -} - -impl From for Error { - fn from(error: mint::Error) -> Error { - Error::Mint(error) - } -} - -impl From for Error { - fn from(error: handle_payment::Error) -> Error { - Error::HandlePayment(error) - } -} - -impl From for Error { - fn from(error: auction::Error) -> Error { - Error::Auction(error) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::Mint(error) => write!(formatter, "Mint error: {}", error), - Error::HandlePayment(error) => write!(formatter, "HandlePayment error: {}", error), - Error::Auction(error) => write!(formatter, "Auction error: {}", error), - } - } -} diff --git a/casper_types_ver_2_0/src/system/handle_payment.rs b/casper_types_ver_2_0/src/system/handle_payment.rs deleted file mode 100644 index 1b12f3ec..00000000 --- a/casper_types_ver_2_0/src/system/handle_payment.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! Contains implementation of a Handle Payment contract functionality. -mod constants; -mod entry_points; -mod error; - -pub use constants::*; -pub use entry_points::handle_payment_entry_points; -pub use error::Error; diff --git a/casper_types_ver_2_0/src/system/handle_payment/constants.rs b/casper_types_ver_2_0/src/system/handle_payment/constants.rs deleted file mode 100644 index ef0feedd..00000000 --- a/casper_types_ver_2_0/src/system/handle_payment/constants.rs +++ /dev/null @@ -1,37 +0,0 @@ -/// Named constant for `purse`. 
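// The aggregate `system::Error` removed above wraps each system contract's
// error behind a `From` impl so that `?` promotes a sub-error into the
// aggregate automatically. A minimal, self-contained sketch of that pattern;
// `MintFailure`, `AuctionFailure` and `SystemFailure` are illustrative
// stand-ins rather than the real contract error types.
#[derive(Debug)]
enum MintFailure {
    InsufficientFunds,
}

#[derive(Debug)]
enum AuctionFailure {
    MissingKey,
}

#[derive(Debug)]
enum SystemFailure {
    Mint(MintFailure),
    Auction(AuctionFailure),
}

impl From<MintFailure> for SystemFailure {
    fn from(error: MintFailure) -> Self {
        SystemFailure::Mint(error)
    }
}

impl From<AuctionFailure> for SystemFailure {
    fn from(error: AuctionFailure) -> Self {
        SystemFailure::Auction(error)
    }
}

fn debit() -> Result<(), MintFailure> {
    Err(MintFailure::InsufficientFunds)
}

fn settle() -> Result<(), SystemFailure> {
    // `?` converts the `MintFailure` into `SystemFailure::Mint` via `From`.
    debit()?;
    Ok(())
}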
-pub const ARG_PURSE: &str = "purse"; -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; -/// Named constant for `source`. -pub const ARG_ACCOUNT: &str = "account"; -/// Named constant for `target`. -pub const ARG_TARGET: &str = "target"; - -/// Named constant for method `get_payment_purse`. -pub const METHOD_GET_PAYMENT_PURSE: &str = "get_payment_purse"; -/// Named constant for method `set_refund_purse`. -pub const METHOD_SET_REFUND_PURSE: &str = "set_refund_purse"; -/// Named constant for method `get_refund_purse`. -pub const METHOD_GET_REFUND_PURSE: &str = "get_refund_purse"; -/// Named constant for method `finalize_payment`. -pub const METHOD_FINALIZE_PAYMENT: &str = "finalize_payment"; -/// Named constant for method `distribute_accumulated_fees`. -pub const METHOD_DISTRIBUTE_ACCUMULATED_FEES: &str = "distribute_accumulated_fees"; - -/// Storage for handle payment contract hash. -pub const CONTRACT_HASH_KEY: &str = "contract_hash"; - -/// Storage for handle payment access key. -pub const CONTRACT_ACCESS_KEY: &str = "access_key"; - -/// The uref name where the Handle Payment accepts payment for computation on behalf of validators. -pub const PAYMENT_PURSE_KEY: &str = "payment_purse"; - -/// The uref name where the Handle Payment will refund unused payment back to the user. The uref -/// this name corresponds to is set by the user. -pub const REFUND_PURSE_KEY: &str = "refund_purse"; -/// Storage for handle payment accumulation purse key. -/// -/// This purse is used when `fee_elimination` config is set to `Accumulate` which makes sense for -/// some private chains. -pub const ACCUMULATION_PURSE_KEY: &str = "accumulation_purse"; diff --git a/casper_types_ver_2_0/src/system/handle_payment/entry_points.rs b/casper_types_ver_2_0/src/system/handle_payment/entry_points.rs deleted file mode 100644 index f07b09f5..00000000 --- a/casper_types_ver_2_0/src/system/handle_payment/entry_points.rs +++ /dev/null @@ -1,66 +0,0 @@ -use alloc::boxed::Box; - -use crate::{ - system::handle_payment::{ - ARG_ACCOUNT, ARG_AMOUNT, ARG_PURSE, METHOD_FINALIZE_PAYMENT, METHOD_GET_PAYMENT_PURSE, - METHOD_GET_REFUND_PURSE, METHOD_SET_REFUND_PURSE, - }, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -use super::METHOD_DISTRIBUTE_ACCUMULATED_FEES; - -/// Creates handle payment contract entry points. 
-pub fn handle_payment_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let get_payment_purse = EntryPoint::new( - METHOD_GET_PAYMENT_PURSE, - vec![], - CLType::URef, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(get_payment_purse); - - let set_refund_purse = EntryPoint::new( - METHOD_SET_REFUND_PURSE, - vec![Parameter::new(ARG_PURSE, CLType::URef)], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(set_refund_purse); - - let get_refund_purse = EntryPoint::new( - METHOD_GET_REFUND_PURSE, - vec![], - CLType::Option(Box::new(CLType::URef)), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(get_refund_purse); - - let finalize_payment = EntryPoint::new( - METHOD_FINALIZE_PAYMENT, - vec![ - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_ACCOUNT, CLType::ByteArray(32)), - ], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(finalize_payment); - - let distribute_accumulated_fees = EntryPoint::new( - METHOD_DISTRIBUTE_ACCUMULATED_FEES, - vec![], - CLType::Unit, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(distribute_accumulated_fees); - - entry_points -} diff --git a/casper_types_ver_2_0/src/system/handle_payment/error.rs b/casper_types_ver_2_0/src/system/handle_payment/error.rs deleted file mode 100644 index 0c158c93..00000000 --- a/casper_types_ver_2_0/src/system/handle_payment/error.rs +++ /dev/null @@ -1,424 +0,0 @@ -//! Home of the Handle Payment contract's [`enum@Error`] type. -use alloc::vec::Vec; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, - result, -}; - -use crate::{ - bytesrepr::{self, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// Errors which can occur while executing the Handle Payment contract. -// TODO: Split this up into user errors vs. system errors. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - // ===== User errors ===== - /// The given validator is not bonded. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(0, Error::NotBonded as u8); - /// ``` - NotBonded = 0, - /// There are too many bonding or unbonding attempts already enqueued to allow more. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(1, Error::TooManyEventsInQueue as u8); - /// ``` - TooManyEventsInQueue = 1, - /// At least one validator must remain bonded. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(2, Error::CannotUnbondLastValidator as u8); - /// ``` - CannotUnbondLastValidator = 2, - /// Failed to bond or unbond as this would have resulted in exceeding the maximum allowed - /// difference between the largest and smallest stakes. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(3, Error::SpreadTooHigh as u8); - /// ``` - SpreadTooHigh = 3, - /// The given validator already has a bond or unbond attempt enqueued. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(4, Error::MultipleRequests as u8); - /// ``` - MultipleRequests = 4, - /// Attempted to bond with a stake which was too small. 
- /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(5, Error::BondTooSmall as u8); - /// ``` - BondTooSmall = 5, - /// Attempted to bond with a stake which was too large. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(6, Error::BondTooLarge as u8); - /// ``` - BondTooLarge = 6, - /// Attempted to unbond an amount which was too large. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(7, Error::UnbondTooLarge as u8); - /// ``` - UnbondTooLarge = 7, - /// While bonding, the transfer from source purse to the Handle Payment internal purse failed. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(8, Error::BondTransferFailed as u8); - /// ``` - BondTransferFailed = 8, - /// While unbonding, the transfer from the Handle Payment internal purse to the destination - /// purse failed. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(9, Error::UnbondTransferFailed as u8); - /// ``` - UnbondTransferFailed = 9, - // ===== System errors ===== - /// Internal error: a [`BlockTime`](crate::BlockTime) was unexpectedly out of sequence. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(10, Error::TimeWentBackwards as u8); - /// ``` - TimeWentBackwards = 10, - /// Internal error: stakes were unexpectedly empty. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(11, Error::StakesNotFound as u8); - /// ``` - StakesNotFound = 11, - /// Internal error: the Handle Payment contract's payment purse wasn't found. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(12, Error::PaymentPurseNotFound as u8); - /// ``` - PaymentPurseNotFound = 12, - /// Internal error: the Handle Payment contract's payment purse key was the wrong type. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(13, Error::PaymentPurseKeyUnexpectedType as u8); - /// ``` - PaymentPurseKeyUnexpectedType = 13, - /// Internal error: couldn't retrieve the balance for the Handle Payment contract's payment - /// purse. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(14, Error::PaymentPurseBalanceNotFound as u8); - /// ``` - PaymentPurseBalanceNotFound = 14, - /// Internal error: the Handle Payment contract's bonding purse wasn't found. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(15, Error::BondingPurseNotFound as u8); - /// ``` - BondingPurseNotFound = 15, - /// Internal error: the Handle Payment contract's bonding purse key was the wrong type. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(16, Error::BondingPurseKeyUnexpectedType as u8); - /// ``` - BondingPurseKeyUnexpectedType = 16, - /// Internal error: the Handle Payment contract's refund purse key was the wrong type. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(17, Error::RefundPurseKeyUnexpectedType as u8); - /// ``` - RefundPurseKeyUnexpectedType = 17, - /// Internal error: the Handle Payment contract's rewards purse wasn't found. 
- /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(18, Error::RewardsPurseNotFound as u8); - /// ``` - RewardsPurseNotFound = 18, - /// Internal error: the Handle Payment contract's rewards purse key was the wrong type. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(19, Error::RewardsPurseKeyUnexpectedType as u8); - /// ``` - RewardsPurseKeyUnexpectedType = 19, - // TODO: Put these in their own enum, and wrap them separately in `BondingError` and - // `UnbondingError`. - /// Internal error: failed to deserialize the stake's key. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(20, Error::StakesKeyDeserializationFailed as u8); - /// ``` - StakesKeyDeserializationFailed = 20, - /// Internal error: failed to deserialize the stake's balance. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(21, Error::StakesDeserializationFailed as u8); - /// ``` - StakesDeserializationFailed = 21, - /// The invoked Handle Payment function can only be called by system contracts, but was called - /// by a user contract. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(22, Error::SystemFunctionCalledByUserAccount as u8); - /// ``` - SystemFunctionCalledByUserAccount = 22, - /// Internal error: while finalizing payment, the amount spent exceeded the amount available. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(23, Error::InsufficientPaymentForAmountSpent as u8); - /// ``` - InsufficientPaymentForAmountSpent = 23, - /// Internal error: while finalizing payment, failed to pay the validators (the transfer from - /// the Handle Payment contract's payment purse to rewards purse failed). - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(24, Error::FailedTransferToRewardsPurse as u8); - /// ``` - FailedTransferToRewardsPurse = 24, - /// Internal error: while finalizing payment, failed to refund the caller's purse (the transfer - /// from the Handle Payment contract's payment purse to refund purse or account's main purse - /// failed). - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(25, Error::FailedTransferToAccountPurse as u8); - /// ``` - FailedTransferToAccountPurse = 25, - /// Handle Payment contract's "set_refund_purse" method can only be called by the payment code - /// of a deploy, but was called by the session code. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(26, Error::SetRefundPurseCalledOutsidePayment as u8); - /// ``` - SetRefundPurseCalledOutsidePayment = 26, - /// Raised when the system is unable to determine purse balance. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(27, Error::GetBalance as u8); - /// ``` - GetBalance = 27, - /// Raised when the system is unable to put named key. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(28, Error::PutKey as u8); - /// ``` - PutKey = 28, - /// Raised when the system is unable to remove given named key. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(29, Error::RemoveKey as u8); - /// ``` - RemoveKey = 29, - /// Failed to transfer funds. 
- /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(30, Error::Transfer as u8); - /// ``` - Transfer = 30, - /// An arithmetic overflow occurred - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(31, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 31, - // NOTE: These variants below will be removed once support for WASM system contracts will be - // dropped. - #[doc(hidden)] - GasLimit = 32, - /// Refund purse is a payment purse. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(33, Error::RefundPurseIsPaymentPurse as u8); - /// ``` - RefundPurseIsPaymentPurse = 33, - /// Error raised while reducing total supply on the mint system contract. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(34, Error::ReduceTotalSupply as u8); - /// ``` - ReduceTotalSupply = 34, - /// Error writing to a storage. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(35, Error::Storage as u8); - /// ``` - Storage = 35, - /// Internal error: the Handle Payment contract's accumulation purse wasn't found. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(36, Error::AccumulationPurseNotFound as u8); - /// ``` - AccumulationPurseNotFound = 36, - /// Internal error: the Handle Payment contract's accumulation purse key was the wrong type. - /// ``` - /// # use casper_types_ver_2_0::system::handle_payment::Error; - /// assert_eq!(37, Error::AccumulationPurseKeyUnexpectedType as u8); - /// ``` - AccumulationPurseKeyUnexpectedType = 37, -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::NotBonded => formatter.write_str("Not bonded"), - Error::TooManyEventsInQueue => formatter.write_str("Too many events in queue"), - Error::CannotUnbondLastValidator => formatter.write_str("Cannot unbond last validator"), - Error::SpreadTooHigh => formatter.write_str("Spread is too high"), - Error::MultipleRequests => formatter.write_str("Multiple requests"), - Error::BondTooSmall => formatter.write_str("Bond is too small"), - Error::BondTooLarge => formatter.write_str("Bond is too large"), - Error::UnbondTooLarge => formatter.write_str("Unbond is too large"), - Error::BondTransferFailed => formatter.write_str("Bond transfer failed"), - Error::UnbondTransferFailed => formatter.write_str("Unbond transfer failed"), - Error::TimeWentBackwards => formatter.write_str("Time went backwards"), - Error::StakesNotFound => formatter.write_str("Stakes not found"), - Error::PaymentPurseNotFound => formatter.write_str("Payment purse not found"), - Error::PaymentPurseKeyUnexpectedType => { - formatter.write_str("Payment purse has unexpected type") - } - Error::PaymentPurseBalanceNotFound => { - formatter.write_str("Payment purse balance not found") - } - Error::BondingPurseNotFound => formatter.write_str("Bonding purse not found"), - Error::BondingPurseKeyUnexpectedType => { - formatter.write_str("Bonding purse key has unexpected type") - } - Error::RefundPurseKeyUnexpectedType => { - formatter.write_str("Refund purse key has unexpected type") - } - Error::RewardsPurseNotFound => formatter.write_str("Rewards purse not found"), - Error::RewardsPurseKeyUnexpectedType => { - formatter.write_str("Rewards purse has unexpected type") - } - Error::StakesKeyDeserializationFailed => { - formatter.write_str("Failed to deserialize stake's 
key") - } - Error::StakesDeserializationFailed => { - formatter.write_str("Failed to deserialize stake's balance") - } - Error::SystemFunctionCalledByUserAccount => { - formatter.write_str("System function was called by user account") - } - Error::InsufficientPaymentForAmountSpent => { - formatter.write_str("Insufficient payment for amount spent") - } - Error::FailedTransferToRewardsPurse => { - formatter.write_str("Transfer to rewards purse has failed") - } - Error::FailedTransferToAccountPurse => { - formatter.write_str("Transfer to account's purse failed") - } - Error::SetRefundPurseCalledOutsidePayment => { - formatter.write_str("Set refund purse was called outside payment") - } - Error::GetBalance => formatter.write_str("Unable to get purse balance"), - Error::PutKey => formatter.write_str("Unable to put named key"), - Error::RemoveKey => formatter.write_str("Unable to remove named key"), - Error::Transfer => formatter.write_str("Failed to transfer funds"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow"), - Error::GasLimit => formatter.write_str("GasLimit"), - Error::RefundPurseIsPaymentPurse => { - formatter.write_str("Refund purse is a payment purse.") - } - Error::ReduceTotalSupply => formatter.write_str("Failed to reduce total supply."), - Error::Storage => formatter.write_str("Failed to write to storage."), - Error::AccumulationPurseNotFound => formatter.write_str("Accumulation purse not found"), - Error::AccumulationPurseKeyUnexpectedType => { - formatter.write_str("Accumulation purse has unexpected type") - } - } - } -} - -impl TryFrom for Error { - type Error = (); - - fn try_from(value: u8) -> Result { - let error = match value { - v if v == Error::NotBonded as u8 => Error::NotBonded, - v if v == Error::TooManyEventsInQueue as u8 => Error::TooManyEventsInQueue, - v if v == Error::CannotUnbondLastValidator as u8 => Error::CannotUnbondLastValidator, - v if v == Error::SpreadTooHigh as u8 => Error::SpreadTooHigh, - v if v == Error::MultipleRequests as u8 => Error::MultipleRequests, - v if v == Error::BondTooSmall as u8 => Error::BondTooSmall, - v if v == Error::BondTooLarge as u8 => Error::BondTooLarge, - v if v == Error::UnbondTooLarge as u8 => Error::UnbondTooLarge, - v if v == Error::BondTransferFailed as u8 => Error::BondTransferFailed, - v if v == Error::UnbondTransferFailed as u8 => Error::UnbondTransferFailed, - v if v == Error::TimeWentBackwards as u8 => Error::TimeWentBackwards, - v if v == Error::StakesNotFound as u8 => Error::StakesNotFound, - v if v == Error::PaymentPurseNotFound as u8 => Error::PaymentPurseNotFound, - v if v == Error::PaymentPurseKeyUnexpectedType as u8 => { - Error::PaymentPurseKeyUnexpectedType - } - v if v == Error::PaymentPurseBalanceNotFound as u8 => { - Error::PaymentPurseBalanceNotFound - } - v if v == Error::BondingPurseNotFound as u8 => Error::BondingPurseNotFound, - v if v == Error::BondingPurseKeyUnexpectedType as u8 => { - Error::BondingPurseKeyUnexpectedType - } - v if v == Error::RefundPurseKeyUnexpectedType as u8 => { - Error::RefundPurseKeyUnexpectedType - } - v if v == Error::RewardsPurseNotFound as u8 => Error::RewardsPurseNotFound, - v if v == Error::RewardsPurseKeyUnexpectedType as u8 => { - Error::RewardsPurseKeyUnexpectedType - } - v if v == Error::StakesKeyDeserializationFailed as u8 => { - Error::StakesKeyDeserializationFailed - } - v if v == Error::StakesDeserializationFailed as u8 => { - Error::StakesDeserializationFailed - } - v if v == Error::SystemFunctionCalledByUserAccount as u8 => { - 
Error::SystemFunctionCalledByUserAccount - } - v if v == Error::InsufficientPaymentForAmountSpent as u8 => { - Error::InsufficientPaymentForAmountSpent - } - v if v == Error::FailedTransferToRewardsPurse as u8 => { - Error::FailedTransferToRewardsPurse - } - v if v == Error::FailedTransferToAccountPurse as u8 => { - Error::FailedTransferToAccountPurse - } - v if v == Error::SetRefundPurseCalledOutsidePayment as u8 => { - Error::SetRefundPurseCalledOutsidePayment - } - - v if v == Error::GetBalance as u8 => Error::GetBalance, - v if v == Error::PutKey as u8 => Error::PutKey, - v if v == Error::RemoveKey as u8 => Error::RemoveKey, - v if v == Error::Transfer as u8 => Error::Transfer, - v if v == Error::ArithmeticOverflow as u8 => Error::ArithmeticOverflow, - v if v == Error::GasLimit as u8 => Error::GasLimit, - v if v == Error::RefundPurseIsPaymentPurse as u8 => Error::RefundPurseIsPaymentPurse, - v if v == Error::ReduceTotalSupply as u8 => Error::ReduceTotalSupply, - v if v == Error::Storage as u8 => Error::Storage, - v if v == Error::AccumulationPurseNotFound as u8 => Error::AccumulationPurseNotFound, - v if v == Error::AccumulationPurseKeyUnexpectedType as u8 => { - Error::AccumulationPurseKeyUnexpectedType - } - _ => return Err(()), - }; - Ok(error) - } -} - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> result::Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} diff --git a/casper_types_ver_2_0/src/system/mint.rs b/casper_types_ver_2_0/src/system/mint.rs deleted file mode 100644 index 4a7e58a1..00000000 --- a/casper_types_ver_2_0/src/system/mint.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! Contains implementation of a Mint contract functionality. -mod constants; -mod entry_points; -mod error; - -pub use constants::*; -pub use entry_points::mint_entry_points; -pub use error::Error; diff --git a/casper_types_ver_2_0/src/system/mint/constants.rs b/casper_types_ver_2_0/src/system/mint/constants.rs deleted file mode 100644 index cffada44..00000000 --- a/casper_types_ver_2_0/src/system/mint/constants.rs +++ /dev/null @@ -1,40 +0,0 @@ -/// Named constant for `purse`. -pub const ARG_PURSE: &str = "purse"; -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; -/// Named constant for `id`. -pub const ARG_ID: &str = "id"; -/// Named constant for `to`. -pub const ARG_TO: &str = "to"; -/// Named constant for `source`. -pub const ARG_SOURCE: &str = "source"; -/// Named constant for `target`. -pub const ARG_TARGET: &str = "target"; -/// Named constant for `round_seigniorage_rate` used in installer. -pub const ARG_ROUND_SEIGNIORAGE_RATE: &str = "round_seigniorage_rate"; - -/// Named constant for method `mint`. -pub const METHOD_MINT: &str = "mint"; -/// Named constant for method `reduce_total_supply`. -pub const METHOD_REDUCE_TOTAL_SUPPLY: &str = "reduce_total_supply"; -/// Named constant for (synthetic) method `create` -pub const METHOD_CREATE: &str = "create"; -/// Named constant for method `balance`. -pub const METHOD_BALANCE: &str = "balance"; -/// Named constant for method `transfer`. -pub const METHOD_TRANSFER: &str = "transfer"; -/// Named constant for method `read_base_round_reward`. -pub const METHOD_READ_BASE_ROUND_REWARD: &str = "read_base_round_reward"; -/// Named constant for method `mint_into_existing_purse`. 
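// The handle payment `Error` removed above is `#[repr(u8)]` with a manual
// `TryFrom<u8>` built from match guards, so stored error codes map back to
// variants and unknown codes are rejected. A minimal sketch of that
// conversion, using a toy two-variant enum in place of the real error type:
use core::convert::TryFrom;

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u8)]
enum ToyHandlePaymentError {
    NotBonded = 0,
    SpreadTooHigh = 3,
}

impl TryFrom<u8> for ToyHandlePaymentError {
    type Error = ();

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        match value {
            v if v == ToyHandlePaymentError::NotBonded as u8 => {
                Ok(ToyHandlePaymentError::NotBonded)
            }
            v if v == ToyHandlePaymentError::SpreadTooHigh as u8 => {
                Ok(ToyHandlePaymentError::SpreadTooHigh)
            }
            _ => Err(()),
        }
    }
}

#[test]
fn toy_error_codes_map_back_to_variants() {
    assert_eq!(
        ToyHandlePaymentError::try_from(3),
        Ok(ToyHandlePaymentError::SpreadTooHigh)
    );
    assert_eq!(ToyHandlePaymentError::try_from(1), Err(()));
}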
-pub const METHOD_MINT_INTO_EXISTING_PURSE: &str = "mint_into_existing_purse"; - -/// Storage for mint contract hash. -pub const HASH_KEY: &str = "mint_hash"; -/// Storage for mint access key. -pub const ACCESS_KEY: &str = "mint_access"; -/// Storage for base round reward key. -pub const BASE_ROUND_REWARD_KEY: &str = "mint_base_round_reward"; -/// Storage for mint total supply key. -pub const TOTAL_SUPPLY_KEY: &str = "total_supply"; -/// Storage for mint round seigniorage rate. -pub const ROUND_SEIGNIORAGE_RATE_KEY: &str = "round_seigniorage_rate"; diff --git a/casper_types_ver_2_0/src/system/mint/entry_points.rs b/casper_types_ver_2_0/src/system/mint/entry_points.rs deleted file mode 100644 index 6002b338..00000000 --- a/casper_types_ver_2_0/src/system/mint/entry_points.rs +++ /dev/null @@ -1,102 +0,0 @@ -use alloc::boxed::Box; - -use crate::{ - addressable_entity::Parameters, - system::mint::{ - ARG_AMOUNT, ARG_ID, ARG_PURSE, ARG_SOURCE, ARG_TARGET, ARG_TO, METHOD_BALANCE, - METHOD_CREATE, METHOD_MINT, METHOD_MINT_INTO_EXISTING_PURSE, METHOD_READ_BASE_ROUND_REWARD, - METHOD_REDUCE_TOTAL_SUPPLY, METHOD_TRANSFER, - }, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -/// Returns entry points for a mint system contract. -pub fn mint_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_MINT, - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::URef), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_REDUCE_TOTAL_SUPPLY, - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_CREATE, - Parameters::new(), - CLType::URef, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_BALANCE, - vec![Parameter::new(ARG_PURSE, CLType::URef)], - CLType::Option(Box::new(CLType::U512)), - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_TRANSFER, - vec![ - Parameter::new(ARG_TO, CLType::Option(Box::new(CLType::ByteArray(32)))), - Parameter::new(ARG_SOURCE, CLType::URef), - Parameter::new(ARG_TARGET, CLType::URef), - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_ID, CLType::Option(Box::new(CLType::U64))), - ], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_READ_BASE_ROUND_REWARD, - Parameters::new(), - CLType::U512, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - entry_points.add_entry_point(entry_point); - - let entry_point = EntryPoint::new( - METHOD_MINT_INTO_EXISTING_PURSE, - vec![ - Parameter::new(ARG_AMOUNT, CLType::U512), - Parameter::new(ARG_PURSE, CLType::URef), - ], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U8), - }, - EntryPointAccess::Public, - EntryPointType::AddressableEntity, - ); - 
entry_points.add_entry_point(entry_point); - - entry_points -} diff --git a/casper_types_ver_2_0/src/system/mint/error.rs b/casper_types_ver_2_0/src/system/mint/error.rs deleted file mode 100644 index f7d4f3fb..00000000 --- a/casper_types_ver_2_0/src/system/mint/error.rs +++ /dev/null @@ -1,300 +0,0 @@ -//! Home of the Mint contract's [`enum@Error`] type. - -use alloc::vec::Vec; -use core::{ - convert::{TryFrom, TryInto}, - fmt::{self, Display, Formatter}, -}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - CLType, CLTyped, -}; - -/// Errors which can occur while executing the Mint contract. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Error { - /// Insufficient funds to complete the transfer. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(0, Error::InsufficientFunds as u8); - /// ``` - InsufficientFunds = 0, - /// Source purse not found. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(1, Error::SourceNotFound as u8); - /// ``` - SourceNotFound = 1, - /// Destination purse not found. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(2, Error::DestNotFound as u8); - /// ``` - DestNotFound = 2, - /// The given [`URef`](crate::URef) does not reference the account holder's purse, or such a - /// `URef` does not have the required [`AccessRights`](crate::AccessRights). - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(3, Error::InvalidURef as u8); - /// ``` - InvalidURef = 3, - /// The source purse is not writeable (see [`URef::is_writeable`](crate::URef::is_writeable)), - /// or the destination purse is not addable (see - /// [`URef::is_addable`](crate::URef::is_addable)). - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(4, Error::InvalidAccessRights as u8); - /// ``` - InvalidAccessRights = 4, - /// Tried to create a new purse with a non-zero initial balance. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(5, Error::InvalidNonEmptyPurseCreation as u8); - /// ``` - InvalidNonEmptyPurseCreation = 5, - /// Failed to read from local or global storage. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(6, Error::Storage as u8); - /// ``` - Storage = 6, - /// Purse not found while trying to get balance. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(7, Error::PurseNotFound as u8); - /// ``` - PurseNotFound = 7, - /// Unable to obtain a key by its name. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(8, Error::MissingKey as u8); - /// ``` - MissingKey = 8, - /// Total supply not found. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(9, Error::TotalSupplyNotFound as u8); - /// ``` - TotalSupplyNotFound = 9, - /// Failed to record transfer. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(10, Error::RecordTransferFailure as u8); - /// ``` - RecordTransferFailure = 10, - /// Invalid attempt to reduce total supply. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(11, Error::InvalidTotalSupplyReductionAttempt as u8); - /// ``` - InvalidTotalSupplyReductionAttempt = 11, - /// Failed to create new uref. 
- /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(12, Error::NewURef as u8); - /// ``` - NewURef = 12, - /// Failed to put key. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(13, Error::PutKey as u8); - /// ``` - PutKey = 13, - /// Failed to write to dictionary. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(14, Error::WriteDictionary as u8); - /// ``` - WriteDictionary = 14, - /// Failed to create a [`crate::CLValue`]. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(15, Error::CLValue as u8); - /// ``` - CLValue = 15, - /// Failed to serialize data. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(16, Error::Serialize as u8); - /// ``` - Serialize = 16, - /// Source and target purse [`crate::URef`]s are equal. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(17, Error::EqualSourceAndTarget as u8); - /// ``` - EqualSourceAndTarget = 17, - /// An arithmetic overflow has occurred. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(18, Error::ArithmeticOverflow as u8); - /// ``` - ArithmeticOverflow = 18, - - // NOTE: These variants below will be removed once support for WASM system contracts will be - // dropped. - #[doc(hidden)] - GasLimit = 19, - - /// Raised when an entry point is called from invalid account context. - InvalidContext = 20, - - /// Session code tried to transfer more CSPR than user approved. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(21, Error::UnapprovedSpendingAmount as u8); - UnapprovedSpendingAmount = 21, - - /// Failed to transfer tokens on a private chain. - /// ``` - /// # use casper_types_ver_2_0::system::mint::Error; - /// assert_eq!(22, Error::DisabledUnrestrictedTransfers as u8); - DisabledUnrestrictedTransfers = 22, - - #[cfg(test)] - #[doc(hidden)] - Sentinel, -} - -/// Used for testing; this should be guaranteed to be the maximum valid value of [`Error`] enum. -#[cfg(test)] -const MAX_ERROR_VALUE: u8 = Error::Sentinel as u8; - -impl CLTyped for Error { - fn cl_type() -> CLType { - CLType::U8 - } -} - -// This error type is not intended to be used by third party crates. -#[doc(hidden)] -pub struct TryFromU8ForError(()); - -// This conversion is not intended to be used by third party crates. 
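// The test-only `Sentinel` variant and `MAX_ERROR_VALUE` removed above exist
// so a single sweep over every `u8` can prove that the conversion accepts
// exactly the defined discriminants and rejects everything else. A minimal
// sketch of that exhaustiveness check; `ToyMintError` is an illustrative
// stand-in with only two real variants:
use core::convert::TryFrom;

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u8)]
enum ToyMintError {
    InsufficientFunds = 0,
    SourceNotFound = 1,
    #[cfg(test)]
    Sentinel,
}

#[cfg(test)]
const MAX_TOY_ERROR_VALUE: u8 = ToyMintError::Sentinel as u8;

impl TryFrom<u8> for ToyMintError {
    type Error = ();

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        match value {
            0 => Ok(ToyMintError::InsufficientFunds),
            1 => Ok(ToyMintError::SourceNotFound),
            _ => Err(()),
        }
    }
}

#[test]
fn every_code_below_the_sentinel_round_trips() {
    for i in 0..=u8::MAX {
        match ToyMintError::try_from(i) {
            Ok(error) if i < MAX_TOY_ERROR_VALUE => assert_eq!(error as u8, i),
            Ok(error) => panic!("variant {:?} exceeds MAX_TOY_ERROR_VALUE", error),
            Err(()) => assert!(i >= MAX_TOY_ERROR_VALUE, "missing conversion for {}", i),
        }
    }
}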
-#[doc(hidden)] -impl TryFrom for Error { - type Error = TryFromU8ForError; - - fn try_from(value: u8) -> Result { - match value { - d if d == Error::InsufficientFunds as u8 => Ok(Error::InsufficientFunds), - d if d == Error::SourceNotFound as u8 => Ok(Error::SourceNotFound), - d if d == Error::DestNotFound as u8 => Ok(Error::DestNotFound), - d if d == Error::InvalidURef as u8 => Ok(Error::InvalidURef), - d if d == Error::InvalidAccessRights as u8 => Ok(Error::InvalidAccessRights), - d if d == Error::InvalidNonEmptyPurseCreation as u8 => { - Ok(Error::InvalidNonEmptyPurseCreation) - } - d if d == Error::Storage as u8 => Ok(Error::Storage), - d if d == Error::PurseNotFound as u8 => Ok(Error::PurseNotFound), - d if d == Error::MissingKey as u8 => Ok(Error::MissingKey), - d if d == Error::TotalSupplyNotFound as u8 => Ok(Error::TotalSupplyNotFound), - d if d == Error::RecordTransferFailure as u8 => Ok(Error::RecordTransferFailure), - d if d == Error::InvalidTotalSupplyReductionAttempt as u8 => { - Ok(Error::InvalidTotalSupplyReductionAttempt) - } - d if d == Error::NewURef as u8 => Ok(Error::NewURef), - d if d == Error::PutKey as u8 => Ok(Error::PutKey), - d if d == Error::WriteDictionary as u8 => Ok(Error::WriteDictionary), - d if d == Error::CLValue as u8 => Ok(Error::CLValue), - d if d == Error::Serialize as u8 => Ok(Error::Serialize), - d if d == Error::EqualSourceAndTarget as u8 => Ok(Error::EqualSourceAndTarget), - d if d == Error::ArithmeticOverflow as u8 => Ok(Error::ArithmeticOverflow), - d if d == Error::GasLimit as u8 => Ok(Error::GasLimit), - d if d == Error::InvalidContext as u8 => Ok(Error::InvalidContext), - d if d == Error::UnapprovedSpendingAmount as u8 => Ok(Error::UnapprovedSpendingAmount), - d if d == Error::DisabledUnrestrictedTransfers as u8 => { - Ok(Error::DisabledUnrestrictedTransfers) - } - _ => Err(TryFromU8ForError(())), - } - } -} - -impl ToBytes for Error { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let value = *self as u8; - value.to_bytes() - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for Error { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (value, rem): (u8, _) = FromBytes::from_bytes(bytes)?; - let error: Error = value - .try_into() - // In case an Error variant is unable to be determined it would return an - // Error::Formatting as if its unable to be correctly deserialized. 
- .map_err(|_| bytesrepr::Error::Formatting)?; - Ok((error, rem)) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::InsufficientFunds => formatter.write_str("Insufficient funds"), - Error::SourceNotFound => formatter.write_str("Source not found"), - Error::DestNotFound => formatter.write_str("Destination not found"), - Error::InvalidURef => formatter.write_str("Invalid URef"), - Error::InvalidAccessRights => formatter.write_str("Invalid AccessRights"), - Error::InvalidNonEmptyPurseCreation => { - formatter.write_str("Invalid non-empty purse creation") - } - Error::Storage => formatter.write_str("Storage error"), - Error::PurseNotFound => formatter.write_str("Purse not found"), - Error::MissingKey => formatter.write_str("Missing key"), - Error::TotalSupplyNotFound => formatter.write_str("Total supply not found"), - Error::RecordTransferFailure => formatter.write_str("Failed to record transfer"), - Error::InvalidTotalSupplyReductionAttempt => { - formatter.write_str("Invalid attempt to reduce total supply") - } - Error::NewURef => formatter.write_str("Failed to create new uref"), - Error::PutKey => formatter.write_str("Failed to put key"), - Error::WriteDictionary => formatter.write_str("Failed to write dictionary"), - Error::CLValue => formatter.write_str("Failed to create a CLValue"), - Error::Serialize => formatter.write_str("Failed to serialize data"), - Error::EqualSourceAndTarget => formatter.write_str("Invalid target purse"), - Error::ArithmeticOverflow => formatter.write_str("Arithmetic overflow has occurred"), - Error::GasLimit => formatter.write_str("GasLimit"), - Error::InvalidContext => formatter.write_str("Invalid context"), - Error::UnapprovedSpendingAmount => formatter.write_str("Unapproved spending amount"), - Error::DisabledUnrestrictedTransfers => { - formatter.write_str("Disabled unrestricted transfers") - } - #[cfg(test)] - Error::Sentinel => formatter.write_str("Sentinel error"), - } - } -} - -#[cfg(test)] -mod tests { - use std::convert::TryFrom; - - use super::{Error, TryFromU8ForError, MAX_ERROR_VALUE}; - - #[test] - fn error_round_trips() { - for i in 0..=u8::max_value() { - match Error::try_from(i) { - Ok(error) if i < MAX_ERROR_VALUE => assert_eq!(error as u8, i), - Ok(error) => panic!( - "value of variant {:?} ({}) exceeds MAX_ERROR_VALUE ({})", - error, i, MAX_ERROR_VALUE - ), - Err(TryFromU8ForError(())) if i >= MAX_ERROR_VALUE => (), - Err(TryFromU8ForError(())) => { - panic!("missing conversion from u8 to error value: {}", i) - } - } - } - } -} diff --git a/casper_types_ver_2_0/src/system/standard_payment.rs b/casper_types_ver_2_0/src/system/standard_payment.rs deleted file mode 100644 index 92c3fab3..00000000 --- a/casper_types_ver_2_0/src/system/standard_payment.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! Contains implementation of a standard payment contract implementation. -mod constants; -mod entry_points; - -pub use constants::*; -pub use entry_points::standard_payment_entry_points; diff --git a/casper_types_ver_2_0/src/system/standard_payment/constants.rs b/casper_types_ver_2_0/src/system/standard_payment/constants.rs deleted file mode 100644 index 9bd88784..00000000 --- a/casper_types_ver_2_0/src/system/standard_payment/constants.rs +++ /dev/null @@ -1,10 +0,0 @@ -/// Named constant for `amount`. -pub const ARG_AMOUNT: &str = "amount"; - -/// Named constant for method `pay`. -pub const METHOD_PAY: &str = "pay"; - -/// Storage for standard payment contract hash. 
-pub const HASH_KEY: &str = "standard_payment_hash"; -/// Storage for standard payment access key. -pub const ACCESS_KEY: &str = "standard_payment_access"; diff --git a/casper_types_ver_2_0/src/system/standard_payment/entry_points.rs b/casper_types_ver_2_0/src/system/standard_payment/entry_points.rs deleted file mode 100644 index 3eeaed52..00000000 --- a/casper_types_ver_2_0/src/system/standard_payment/entry_points.rs +++ /dev/null @@ -1,25 +0,0 @@ -use alloc::{boxed::Box, string::ToString}; - -use crate::{ - system::standard_payment::{ARG_AMOUNT, METHOD_PAY}, - CLType, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Parameter, -}; - -/// Creates standard payment contract entry points. -pub fn standard_payment_entry_points() -> EntryPoints { - let mut entry_points = EntryPoints::new(); - - let entry_point = EntryPoint::new( - METHOD_PAY.to_string(), - vec![Parameter::new(ARG_AMOUNT, CLType::U512)], - CLType::Result { - ok: Box::new(CLType::Unit), - err: Box::new(CLType::U32), - }, - EntryPointAccess::Public, - EntryPointType::Session, - ); - entry_points.add_entry_point(entry_point); - - entry_points -} diff --git a/casper_types_ver_2_0/src/system/system_contract_type.rs b/casper_types_ver_2_0/src/system/system_contract_type.rs deleted file mode 100644 index 0ad6551a..00000000 --- a/casper_types_ver_2_0/src/system/system_contract_type.rs +++ /dev/null @@ -1,249 +0,0 @@ -//! Home of system contract type enum. - -use alloc::{ - string::{String, ToString}, - vec::Vec, -}; -use core::{ - convert::TryFrom, - fmt::{self, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - ApiError, EntryPoints, -}; - -const MINT_TAG: u8 = 0; -const HANDLE_PAYMENT_TAG: u8 = 1; -const STANDARD_PAYMENT_TAG: u8 = 2; -const AUCTION_TAG: u8 = 3; - -use super::{ - auction::auction_entry_points, handle_payment::handle_payment_entry_points, - mint::mint_entry_points, standard_payment::standard_payment_entry_points, -}; - -/// System contract types. -/// -/// Used by converting to a `u32` and passing as the `system_contract_index` argument of -/// `ext_ffi::casper_get_system_contract()`. -#[derive( - Debug, Clone, PartialEq, Eq, Default, PartialOrd, Ord, Hash, Serialize, Deserialize, Copy, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum SystemEntityType { - /// Mint contract. - #[default] - Mint, - /// Handle Payment contract. - HandlePayment, - /// Standard Payment contract. - StandardPayment, - /// Auction contract. 
- Auction, -} - -impl ToBytes for SystemEntityType { - fn to_bytes(&self) -> Result, Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), Error> { - match self { - SystemEntityType::Mint => { - writer.push(MINT_TAG); - } - SystemEntityType::HandlePayment => { - writer.push(HANDLE_PAYMENT_TAG); - } - SystemEntityType::StandardPayment => { - writer.push(STANDARD_PAYMENT_TAG); - } - SystemEntityType::Auction => writer.push(AUCTION_TAG), - } - Ok(()) - } -} - -impl FromBytes for SystemEntityType { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - MINT_TAG => Ok((SystemEntityType::Mint, remainder)), - HANDLE_PAYMENT_TAG => Ok((SystemEntityType::HandlePayment, remainder)), - STANDARD_PAYMENT_TAG => Ok((SystemEntityType::StandardPayment, remainder)), - AUCTION_TAG => Ok((SystemEntityType::Auction, remainder)), - _ => Err(Error::Formatting), - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> SystemEntityType { - match rng.gen_range(0..=3) { - 0 => SystemEntityType::Mint, - 1 => SystemEntityType::Auction, - 2 => SystemEntityType::StandardPayment, - 3 => SystemEntityType::HandlePayment, - _ => unreachable!(), - } - } -} - -/// Name of mint system contract -pub const MINT: &str = "mint"; -/// Name of handle payment system contract -pub const HANDLE_PAYMENT: &str = "handle payment"; -/// Name of standard payment system contract -pub const STANDARD_PAYMENT: &str = "standard payment"; -/// Name of auction system contract -pub const AUCTION: &str = "auction"; - -impl SystemEntityType { - /// Returns the name of the system contract. - pub fn contract_name(&self) -> String { - match self { - SystemEntityType::Mint => MINT.to_string(), - SystemEntityType::HandlePayment => HANDLE_PAYMENT.to_string(), - SystemEntityType::StandardPayment => STANDARD_PAYMENT.to_string(), - SystemEntityType::Auction => AUCTION.to_string(), - } - } - - /// Returns the entrypoint of the system contract. - pub fn contract_entry_points(&self) -> EntryPoints { - match self { - SystemEntityType::Mint => mint_entry_points(), - SystemEntityType::HandlePayment => handle_payment_entry_points(), - SystemEntityType::StandardPayment => standard_payment_entry_points(), - SystemEntityType::Auction => auction_entry_points(), - } - } -} - -impl From for u32 { - fn from(system_contract_type: SystemEntityType) -> u32 { - match system_contract_type { - SystemEntityType::Mint => 0, - SystemEntityType::HandlePayment => 1, - SystemEntityType::StandardPayment => 2, - SystemEntityType::Auction => 3, - } - } -} - -// This conversion is not intended to be used by third party crates. 
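// `SystemEntityType` above serializes as a single tag byte: `write_bytes`
// pushes one of the `*_TAG` constants and `from_bytes` matches the byte back
// to a variant, handing the unread remainder back to the caller and rejecting
// unknown tags. A minimal sketch of that tag round-trip over a plain
// `Vec<u8>`; the toy names stand in for the real bytesrepr traits:
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ToyEntityType {
    Mint,
    Auction,
}

const TOY_MINT_TAG: u8 = 0;
const TOY_AUCTION_TAG: u8 = 3;

fn write_tag(entity: ToyEntityType, writer: &mut Vec<u8>) {
    match entity {
        ToyEntityType::Mint => writer.push(TOY_MINT_TAG),
        ToyEntityType::Auction => writer.push(TOY_AUCTION_TAG),
    }
}

fn read_tag(bytes: &[u8]) -> Result<(ToyEntityType, &[u8]), &'static str> {
    // Split off the tag byte and return the remainder, mirroring the
    // `(value, remainder)` shape used by `FromBytes`.
    let (&tag, remainder) = bytes.split_first().ok_or("early end of input")?;
    match tag {
        TOY_MINT_TAG => Ok((ToyEntityType::Mint, remainder)),
        TOY_AUCTION_TAG => Ok((ToyEntityType::Auction, remainder)),
        _ => Err("unknown tag"),
    }
}

#[test]
fn tag_round_trip() {
    let mut buffer = Vec::new();
    write_tag(ToyEntityType::Auction, &mut buffer);
    let (parsed, rest) = read_tag(&buffer).unwrap();
    assert_eq!(parsed, ToyEntityType::Auction);
    assert!(rest.is_empty());
}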
-#[doc(hidden)] -impl TryFrom for SystemEntityType { - type Error = ApiError; - fn try_from(value: u32) -> Result { - match value { - 0 => Ok(SystemEntityType::Mint), - 1 => Ok(SystemEntityType::HandlePayment), - 2 => Ok(SystemEntityType::StandardPayment), - 3 => Ok(SystemEntityType::Auction), - _ => Err(ApiError::InvalidSystemContract), - } - } -} - -impl Display for SystemEntityType { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - SystemEntityType::Mint => write!(f, "{}", MINT), - SystemEntityType::HandlePayment => write!(f, "{}", HANDLE_PAYMENT), - SystemEntityType::StandardPayment => write!(f, "{}", STANDARD_PAYMENT), - SystemEntityType::Auction => write!(f, "{}", AUCTION), - } - } -} - -#[cfg(test)] -mod tests { - use std::string::ToString; - - use super::*; - - #[test] - fn get_index_of_mint_contract() { - let index: u32 = SystemEntityType::Mint.into(); - assert_eq!(index, 0u32); - assert_eq!(SystemEntityType::Mint.to_string(), MINT); - } - - #[test] - fn get_index_of_handle_payment_contract() { - let index: u32 = SystemEntityType::HandlePayment.into(); - assert_eq!(index, 1u32); - assert_eq!(SystemEntityType::HandlePayment.to_string(), HANDLE_PAYMENT); - } - - #[test] - fn get_index_of_standard_payment_contract() { - let index: u32 = SystemEntityType::StandardPayment.into(); - assert_eq!(index, 2u32); - assert_eq!( - SystemEntityType::StandardPayment.to_string(), - STANDARD_PAYMENT - ); - } - - #[test] - fn get_index_of_auction_contract() { - let index: u32 = SystemEntityType::Auction.into(); - assert_eq!(index, 3u32); - assert_eq!(SystemEntityType::Auction.to_string(), AUCTION); - } - - #[test] - fn create_mint_variant_from_int() { - let mint = SystemEntityType::try_from(0).ok().unwrap(); - assert_eq!(mint, SystemEntityType::Mint); - } - - #[test] - fn create_handle_payment_variant_from_int() { - let handle_payment = SystemEntityType::try_from(1).ok().unwrap(); - assert_eq!(handle_payment, SystemEntityType::HandlePayment); - } - - #[test] - fn create_standard_payment_variant_from_int() { - let handle_payment = SystemEntityType::try_from(2).ok().unwrap(); - assert_eq!(handle_payment, SystemEntityType::StandardPayment); - } - - #[test] - fn create_auction_variant_from_int() { - let auction = SystemEntityType::try_from(3).ok().unwrap(); - assert_eq!(auction, SystemEntityType::Auction); - } - - #[test] - fn create_unknown_system_contract_variant() { - assert!(SystemEntityType::try_from(4).is_err()); - assert!(SystemEntityType::try_from(5).is_err()); - assert!(SystemEntityType::try_from(10).is_err()); - assert!(SystemEntityType::try_from(u32::max_value()).is_err()); - } -} diff --git a/casper_types_ver_2_0/src/tagged.rs b/casper_types_ver_2_0/src/tagged.rs deleted file mode 100644 index deddfe83..00000000 --- a/casper_types_ver_2_0/src/tagged.rs +++ /dev/null @@ -1,5 +0,0 @@ -/// The quality of having a tag -pub trait Tagged { - /// Returns the tag of a given object - fn tag(&self) -> T; -} diff --git a/casper_types_ver_2_0/src/testing.rs b/casper_types_ver_2_0/src/testing.rs deleted file mode 100644 index 24b7efd3..00000000 --- a/casper_types_ver_2_0/src/testing.rs +++ /dev/null @@ -1,195 +0,0 @@ -//! An RNG for testing purposes. -use std::{ - cell::RefCell, - cmp, env, - fmt::{self, Debug, Display, Formatter}, - iter, thread, -}; - -use rand::{ - self, - distributions::{uniform::SampleRange, Distribution, Standard}, - CryptoRng, Error, Rng, RngCore, SeedableRng, -}; -use rand_pcg::Pcg64Mcg; - -thread_local! 
{ - static THIS_THREAD_HAS_RNG: RefCell = RefCell::new(false); -} - -const CL_TEST_SEED: &str = "CL_TEST_SEED"; - -type Seed = ::Seed; // [u8; 16] - -/// A fast, seedable pseudorandom number generator for use in tests which prints the seed if the -/// thread in which it is created panics. -/// -/// Only one `TestRng` is permitted per thread. -pub struct TestRng { - seed: Seed, - rng: Pcg64Mcg, -} - -impl TestRng { - /// Constructs a new `TestRng` using a seed generated from the env var `CL_TEST_SEED` if set or - /// from cryptographically secure random data if not. - /// - /// Note that `new()` or `default()` should only be called once per test. If a test needs to - /// spawn multiple threads each with their own `TestRng`, then use `new()` to create a single, - /// master `TestRng`, then use it to create a seed per child thread. The child `TestRng`s can - /// then be constructed in their own threads via `from_seed()`. - /// - /// # Panics - /// - /// Panics if a `TestRng` has already been created on this thread. - pub fn new() -> Self { - Self::set_flag_or_panic(); - - let mut seed = Seed::default(); - match env::var(CL_TEST_SEED) { - Ok(seed_as_hex) => { - base16::decode_slice(&seed_as_hex, &mut seed).unwrap_or_else(|error| { - THIS_THREAD_HAS_RNG.with(|flag| { - *flag.borrow_mut() = false; - }); - panic!("can't parse '{}' as a TestRng seed: {}", seed_as_hex, error) - }); - } - Err(_) => { - rand::thread_rng().fill(&mut seed); - } - }; - - let rng = Pcg64Mcg::from_seed(seed); - - TestRng { seed, rng } - } - - /// Constructs a new `TestRng` using `seed`. This should be used in cases where a test needs to - /// spawn multiple threads each with their own `TestRng`. A single, master `TestRng` should be - /// constructed before any child threads are spawned, and that one should be used to create - /// seeds for the child threads' `TestRng`s. - /// - /// # Panics - /// - /// Panics if a `TestRng` has already been created on this thread. - pub fn from_seed(seed: Seed) -> Self { - Self::set_flag_or_panic(); - let rng = Pcg64Mcg::from_seed(seed); - TestRng { seed, rng } - } - - /// Returns a random `String` of length within the range specified by `length_range`. - pub fn random_string>(&mut self, length_range: R) -> String { - let count = self.gen_range(length_range); - iter::repeat_with(|| self.gen::()) - .take(count) - .collect() - } - - /// Returns a random `Vec` of length within the range specified by `length_range`. - pub fn random_vec, T>(&mut self, length_range: R) -> Vec - where - Standard: Distribution, - { - let count = self.gen_range(length_range); - iter::repeat_with(|| self.gen::()).take(count).collect() - } - - fn set_flag_or_panic() { - THIS_THREAD_HAS_RNG.with(|flag| { - if *flag.borrow() { - panic!("cannot create multiple TestRngs on the same thread"); - } - *flag.borrow_mut() = true; - }); - } - - /// Creates a child RNG. - /// - /// The resulting RNG is seeded from `self` deterministically. 
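// `TestRng::create_child` above derives a child generator deterministically
// by drawing the child's seed from the parent, so per-thread RNGs stay
// reproducible from the single seed printed on panic. A minimal sketch of
// that derivation using the same `rand_pcg::Pcg64Mcg` generator, without the
// one-per-thread bookkeeping:
use rand::{Rng, SeedableRng};
use rand_pcg::Pcg64Mcg;

fn child_rng(parent: &mut Pcg64Mcg) -> Pcg64Mcg {
    // Drawing the seed from the parent makes the child a pure function of
    // the parent's seed and its position in the parent's stream.
    let seed: [u8; 16] = parent.gen();
    Pcg64Mcg::from_seed(seed)
}

#[test]
fn children_are_reproducible() {
    let seed = [7u8; 16];
    let mut parent_a = Pcg64Mcg::from_seed(seed);
    let mut parent_b = Pcg64Mcg::from_seed(seed);
    assert_eq!(
        child_rng(&mut parent_a).gen::<u64>(),
        child_rng(&mut parent_b).gen::<u64>()
    );
}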
- pub fn create_child(&mut self) -> Self { - let seed = self.gen(); - let rng = Pcg64Mcg::from_seed(seed); - TestRng { seed, rng } - } -} - -impl Default for TestRng { - fn default() -> Self { - TestRng::new() - } -} - -impl Display for TestRng { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "TestRng seed: {}", - base16::encode_lower(&self.seed) - ) - } -} - -impl Debug for TestRng { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - Display::fmt(self, formatter) - } -} - -impl Drop for TestRng { - fn drop(&mut self) { - if thread::panicking() { - let line_1 = format!("Thread: {}", thread::current().name().unwrap_or("unnamed")); - let line_2 = "To reproduce failure, try running with env var:"; - let line_3 = format!("{}={}", CL_TEST_SEED, base16::encode_lower(&self.seed)); - let max_length = cmp::max(line_1.len(), line_2.len()); - let border = "=".repeat(max_length); - println!( - "\n{}\n{}\n{}\n{}\n{}\n", - border, line_1, line_2, line_3, border - ); - } - } -} - -impl SeedableRng for TestRng { - type Seed = ::Seed; - - fn from_seed(seed: Self::Seed) -> Self { - Self::from_seed(seed) - } -} - -impl RngCore for TestRng { - fn next_u32(&mut self) -> u32 { - self.rng.next_u32() - } - - fn next_u64(&mut self) -> u64 { - self.rng.next_u64() - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.rng.fill_bytes(dest) - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { - self.rng.try_fill_bytes(dest) - } -} - -impl CryptoRng for TestRng {} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - #[should_panic(expected = "cannot create multiple TestRngs on the same thread")] - fn second_test_rng_in_thread_should_panic() { - let _test_rng1 = TestRng::new(); - let seed = [1; 16]; - let _test_rng2 = TestRng::from_seed(seed); - } -} diff --git a/casper_types_ver_2_0/src/timestamp.rs b/casper_types_ver_2_0/src/timestamp.rs deleted file mode 100644 index 524d0b14..00000000 --- a/casper_types_ver_2_0/src/timestamp.rs +++ /dev/null @@ -1,470 +0,0 @@ -use alloc::vec::Vec; -use core::{ - fmt::{self, Display, Formatter}, - ops::{Add, AddAssign, Div, Mul, Rem, Shl, Shr, Sub, SubAssign}, - time::Duration, -}; -#[cfg(any(feature = "std", test))] -use std::{str::FromStr, time::SystemTime}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use humantime::{DurationError, TimestampError}; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::bytesrepr::{self, FromBytes, ToBytes}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -/// Example timestamp equal to 2020-11-17T00:39:24.072Z. -#[cfg(feature = "json-schema")] -const TIMESTAMP: Timestamp = Timestamp(1_605_573_564_072); - -/// A timestamp type, representing a concrete moment in time. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Timestamp formatted as per RFC 3339") -)] -pub struct Timestamp(#[cfg_attr(feature = "json-schema", schemars(with = "String"))] u64); - -impl Timestamp { - /// The maximum value a timestamp can have. 
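// `Drop for TestRng` above prints the reproduction line only when the thread
// is already panicking, so passing tests stay silent. A minimal sketch of
// that panic-time reporting, with the seed pre-rendered as a hex string:
use std::thread;

struct SeedEcho {
    seed_hex: String,
}

impl Drop for SeedEcho {
    fn drop(&mut self) {
        // `thread::panicking` is true while unwinding, which is exactly when
        // the seed is worth printing.
        if thread::panicking() {
            println!("To reproduce failure, try running with env var:");
            println!("CL_TEST_SEED={}", self.seed_hex);
        }
    }
}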
- pub const MAX: Timestamp = Timestamp(u64::MAX); - - #[cfg(any(feature = "std", test))] - /// Returns the timestamp of the current moment. - pub fn now() -> Self { - let millis = SystemTime::UNIX_EPOCH.elapsed().unwrap().as_millis() as u64; - Timestamp(millis) - } - - #[cfg(any(feature = "std", test))] - /// Returns the time that has elapsed since this timestamp. - pub fn elapsed(&self) -> TimeDiff { - TimeDiff(Timestamp::now().0.saturating_sub(self.0)) - } - - /// Returns a zero timestamp. - pub fn zero() -> Self { - Timestamp(0) - } - - /// Returns the timestamp as the number of milliseconds since the Unix epoch - pub fn millis(&self) -> u64 { - self.0 - } - - /// Returns the difference between `self` and `other`, or `0` if `self` is earlier than `other`. - pub fn saturating_diff(self, other: Timestamp) -> TimeDiff { - TimeDiff(self.0.saturating_sub(other.0)) - } - - /// Returns the difference between `self` and `other`, or `0` if that would be before the epoch. - #[must_use] - pub fn saturating_sub(self, other: TimeDiff) -> Timestamp { - Timestamp(self.0.saturating_sub(other.0)) - } - - /// Returns the sum of `self` and `other`, or the maximum possible value if that would be - /// exceeded. - #[must_use] - pub fn saturating_add(self, other: TimeDiff) -> Timestamp { - Timestamp(self.0.saturating_add(other.0)) - } - - /// Returns the number of trailing zeros in the number of milliseconds since the epoch. - pub fn trailing_zeros(&self) -> u8 { - self.0.trailing_zeros() as u8 - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &TIMESTAMP - } - - /// Returns a random `Timestamp`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - Timestamp(1_596_763_000_000 + rng.gen_range(200_000..1_000_000)) - } - - /// Checked subtraction for timestamps - #[cfg(any(feature = "testing", test))] - pub fn checked_sub(self, other: TimeDiff) -> Option { - self.0.checked_sub(other.0).map(Timestamp) - } -} - -impl Display for Timestamp { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - #[cfg(any(feature = "std", test))] - return match SystemTime::UNIX_EPOCH.checked_add(Duration::from_millis(self.0)) { - Some(system_time) => write!(f, "{}", humantime::format_rfc3339_millis(system_time)) - .or_else(|e| write!(f, "Invalid timestamp: {}: {}", e, self.0)), - None => write!(f, "invalid Timestamp: {} ms after the Unix epoch", self.0), - }; - - #[cfg(not(any(feature = "std", test)))] - write!(f, "timestamp({}ms)", self.0) - } -} - -#[cfg(any(feature = "std", test))] -impl FromStr for Timestamp { - type Err = TimestampError; - - fn from_str(value: &str) -> Result { - let system_time = humantime::parse_rfc3339_weak(value)?; - let inner = system_time - .duration_since(SystemTime::UNIX_EPOCH) - .map_err(|_| TimestampError::OutOfRange)? 
- .as_millis() as u64; - Ok(Timestamp(inner)) - } -} - -impl Add for Timestamp { - type Output = Timestamp; - - fn add(self, diff: TimeDiff) -> Timestamp { - Timestamp(self.0 + diff.0) - } -} - -impl AddAssign for Timestamp { - fn add_assign(&mut self, rhs: TimeDiff) { - self.0 += rhs.0; - } -} - -#[cfg(any(feature = "testing", test))] -impl Sub for Timestamp { - type Output = Timestamp; - - fn sub(self, diff: TimeDiff) -> Timestamp { - Timestamp(self.0 - diff.0) - } -} - -impl Rem for Timestamp { - type Output = TimeDiff; - - fn rem(self, diff: TimeDiff) -> TimeDiff { - TimeDiff(self.0 % diff.0) - } -} - -impl Shl for Timestamp -where - u64: Shl, -{ - type Output = Timestamp; - - fn shl(self, rhs: T) -> Timestamp { - Timestamp(self.0 << rhs) - } -} - -impl Shr for Timestamp -where - u64: Shr, -{ - type Output = Timestamp; - - fn shr(self, rhs: T) -> Timestamp { - Timestamp(self.0 >> rhs) - } -} - -#[cfg(any(feature = "std", test))] -impl Serialize for Timestamp { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -#[cfg(any(feature = "std", test))] -impl<'de> Deserialize<'de> for Timestamp { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - Timestamp::from_str(&value_as_string).map_err(SerdeError::custom) - } else { - let inner = u64::deserialize(deserializer)?; - Ok(Timestamp(inner)) - } - } -} - -impl ToBytes for Timestamp { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for Timestamp { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - u64::from_bytes(bytes).map(|(inner, remainder)| (Timestamp(inner), remainder)) - } -} - -impl From for Timestamp { - fn from(milliseconds_since_epoch: u64) -> Timestamp { - Timestamp(milliseconds_since_epoch) - } -} - -/// A time difference between two timestamps. -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Human-readable duration.") -)] -pub struct TimeDiff(#[cfg_attr(feature = "json-schema", schemars(with = "String"))] u64); - -impl Display for TimeDiff { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - #[cfg(any(feature = "std", test))] - return write!(f, "{}", humantime::format_duration(Duration::from(*self))); - - #[cfg(not(any(feature = "std", test)))] - write!(f, "time diff({}ms)", self.0) - } -} - -#[cfg(any(feature = "std", test))] -impl FromStr for TimeDiff { - type Err = DurationError; - - fn from_str(value: &str) -> Result { - let inner = humantime::parse_duration(value)?.as_millis() as u64; - Ok(TimeDiff(inner)) - } -} - -impl TimeDiff { - /// Returns the time difference as the number of milliseconds since the Unix epoch - pub fn millis(&self) -> u64 { - self.0 - } - - /// Creates a new time difference from seconds. - pub const fn from_seconds(seconds: u32) -> Self { - TimeDiff(seconds as u64 * 1_000) - } - - /// Creates a new time difference from milliseconds. - pub const fn from_millis(millis: u64) -> Self { - TimeDiff(millis) - } - - /// Returns the product, or `TimeDiff(u64::MAX)` if it would overflow. 
- #[must_use] - pub fn saturating_mul(self, rhs: u64) -> Self { - TimeDiff(self.0.saturating_mul(rhs)) - } -} - -impl Add for TimeDiff { - type Output = TimeDiff; - - fn add(self, rhs: TimeDiff) -> TimeDiff { - TimeDiff(self.0 + rhs.0) - } -} - -impl AddAssign for TimeDiff { - fn add_assign(&mut self, rhs: TimeDiff) { - self.0 += rhs.0; - } -} - -impl Sub for TimeDiff { - type Output = TimeDiff; - - fn sub(self, rhs: TimeDiff) -> TimeDiff { - TimeDiff(self.0 - rhs.0) - } -} - -impl SubAssign for TimeDiff { - fn sub_assign(&mut self, rhs: TimeDiff) { - self.0 -= rhs.0; - } -} - -impl Mul for TimeDiff { - type Output = TimeDiff; - - fn mul(self, rhs: u64) -> TimeDiff { - TimeDiff(self.0 * rhs) - } -} - -impl Div for TimeDiff { - type Output = TimeDiff; - - fn div(self, rhs: u64) -> TimeDiff { - TimeDiff(self.0 / rhs) - } -} - -impl Div for TimeDiff { - type Output = u64; - - fn div(self, rhs: TimeDiff) -> u64 { - self.0 / rhs.0 - } -} - -impl From for Duration { - fn from(diff: TimeDiff) -> Duration { - Duration::from_millis(diff.0) - } -} - -#[cfg(any(feature = "std", test))] -impl Serialize for TimeDiff { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -#[cfg(any(feature = "std", test))] -impl<'de> Deserialize<'de> for TimeDiff { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let value_as_string = String::deserialize(deserializer)?; - TimeDiff::from_str(&value_as_string).map_err(SerdeError::custom) - } else { - let inner = u64::deserialize(deserializer)?; - Ok(TimeDiff(inner)) - } - } -} - -impl ToBytes for TimeDiff { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for TimeDiff { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - u64::from_bytes(bytes).map(|(inner, remainder)| (TimeDiff(inner), remainder)) - } -} - -impl From for TimeDiff { - fn from(duration: Duration) -> TimeDiff { - TimeDiff(duration.as_millis() as u64) - } -} - -/// A module for the `[serde(with = serde_option_time_diff)]` attribute, to serialize and -/// deserialize `Option` treating `None` as 0. -#[cfg(any(feature = "std", test))] -pub mod serde_option_time_diff { - use super::*; - - /// Serializes an `Option`, using `0` if the value is `None`. - pub fn serialize( - maybe_td: &Option, - serializer: S, - ) -> Result { - maybe_td - .unwrap_or_else(|| TimeDiff::from_millis(0)) - .serialize(serializer) - } - - /// Deserializes an `Option`, returning `None` if the value is `0`. 
- pub fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result<Option<TimeDiff>, D::Error> { - let td = TimeDiff::deserialize(deserializer)?; - if td.0 == 0 { - Ok(None) - } else { - Ok(Some(td)) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn timestamp_serialization_roundtrip() { - let timestamp = Timestamp::now(); - - let timestamp_as_string = timestamp.to_string(); - assert_eq!( - timestamp, - Timestamp::from_str(&timestamp_as_string).unwrap() - ); - - let serialized_json = serde_json::to_string(&timestamp).unwrap(); - assert_eq!(timestamp, serde_json::from_str(&serialized_json).unwrap()); - - let serialized_bincode = bincode::serialize(&timestamp).unwrap(); - assert_eq!( - timestamp, - bincode::deserialize(&serialized_bincode).unwrap() - ); - - bytesrepr::test_serialization_roundtrip(&timestamp); - } - - #[test] - fn timediff_serialization_roundtrip() { - let mut rng = TestRng::new(); - let timediff = TimeDiff(rng.gen()); - - let timediff_as_string = timediff.to_string(); - assert_eq!(timediff, TimeDiff::from_str(&timediff_as_string).unwrap()); - - let serialized_json = serde_json::to_string(&timediff).unwrap(); - assert_eq!(timediff, serde_json::from_str(&serialized_json).unwrap()); - - let serialized_bincode = bincode::serialize(&timediff).unwrap(); - assert_eq!(timediff, bincode::deserialize(&serialized_bincode).unwrap()); - - bytesrepr::test_serialization_roundtrip(&timediff); - } - - #[test] - fn does_not_crash_for_big_timestamp_value() { - assert!(Timestamp::MAX.to_string().starts_with("Invalid timestamp:")); - } -} diff --git a/casper_types_ver_2_0/src/transaction.rs b/casper_types_ver_2_0/src/transaction.rs deleted file mode 100644 index 3583e142..00000000 --- a/casper_types_ver_2_0/src/transaction.rs +++ /dev/null @@ -1,340 +0,0 @@ -mod addressable_entity_identifier; -mod deploy; -mod execution_info; -mod finalized_approvals; -mod initiator_addr; -#[cfg(any(feature = "std", test))] -mod initiator_addr_and_secret_key; -mod package_identifier; -mod pricing_mode; -mod runtime_args; -mod transaction_approvals_hash; -mod transaction_entry_point; -mod transaction_hash; -mod transaction_header; -mod transaction_id; -mod transaction_invocation_target; -mod transaction_runtime; -mod transaction_scheduling; -mod transaction_session_kind; -mod transaction_target; -mod transaction_v1; - -use alloc::{collections::BTreeSet, vec::Vec}; -use core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use once_cell::sync::Lazy; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; -use tracing::error; - -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - Digest, Timestamp, -}; -#[cfg(feature = "json-schema")] -use crate::{account::ACCOUNT_HASH_LENGTH, SecretKey, TimeDiff, URef}; -pub use addressable_entity_identifier::AddressableEntityIdentifier; -pub use deploy::{ - Deploy, DeployApproval, DeployApprovalsHash, DeployConfigFailure, DeployDecodeFromJsonError, - DeployError, DeployExcessiveSizeError, DeployFootprint, DeployHash, DeployHeader, DeployId, - ExecutableDeployItem, ExecutableDeployItemIdentifier, FinalizedDeployApprovals, TransferTarget, -}; -#[cfg(any(feature = "std", test))] -pub use 
deploy::{DeployBuilder, DeployBuilderError}; -pub use execution_info::ExecutionInfo; -pub use finalized_approvals::FinalizedApprovals; -pub use initiator_addr::InitiatorAddr; -#[cfg(any(feature = "std", test))] -use initiator_addr_and_secret_key::InitiatorAddrAndSecretKey; -pub use package_identifier::PackageIdentifier; -pub use pricing_mode::PricingMode; -pub use runtime_args::{NamedArg, RuntimeArgs}; -pub use transaction_approvals_hash::TransactionApprovalsHash; -pub use transaction_entry_point::TransactionEntryPoint; -pub use transaction_hash::TransactionHash; -pub use transaction_header::TransactionHeader; -pub use transaction_id::TransactionId; -pub use transaction_invocation_target::TransactionInvocationTarget; -pub use transaction_runtime::TransactionRuntime; -pub use transaction_scheduling::TransactionScheduling; -pub use transaction_session_kind::TransactionSessionKind; -pub use transaction_target::TransactionTarget; -pub use transaction_v1::{ - FinalizedTransactionV1Approvals, TransactionV1, TransactionV1Approval, - TransactionV1ApprovalsHash, TransactionV1Body, TransactionV1ConfigFailure, - TransactionV1DecodeFromJsonError, TransactionV1Error, TransactionV1ExcessiveSizeError, - TransactionV1Hash, TransactionV1Header, -}; -#[cfg(any(feature = "std", test))] -pub use transaction_v1::{TransactionV1Builder, TransactionV1BuilderError}; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -#[cfg(feature = "json-schema")] -pub(super) static TRANSACTION: Lazy = Lazy::new(|| { - let secret_key = SecretKey::example(); - let source = URef::from_formatted_str( - "uref-0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a-007", - ) - .unwrap(); - let target = URef::from_formatted_str( - "uref-1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b-000", - ) - .unwrap(); - let to = Some(AccountHash::new([40; ACCOUNT_HASH_LENGTH])); - let id = Some(999); - - let v1_txn = TransactionV1Builder::new_transfer(source, target, 30_000_000_000_u64, to, id) - .unwrap() - .with_chain_name("casper-example") - .with_timestamp(*Timestamp::example()) - .with_ttl(TimeDiff::from_seconds(3_600)) - .with_secret_key(secret_key) - .build() - .unwrap(); - Transaction::V1(v1_txn) -}); - -/// A versioned wrapper for a transaction or deploy. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum Transaction { - /// A deploy. - Deploy(Deploy), - /// A version 1 transaction. - #[cfg_attr(any(feature = "std", test), serde(rename = "Version1"))] - V1(TransactionV1), -} - -impl Transaction { - /// Returns the `TransactionHash` identifying this transaction. - pub fn hash(&self) -> TransactionHash { - match self { - Transaction::Deploy(deploy) => TransactionHash::from(*deploy.hash()), - Transaction::V1(txn) => TransactionHash::from(*txn.hash()), - } - } - - /// Returns the computed approvals hash identifying this transaction's approvals. - pub fn compute_approvals_hash(&self) -> Result { - let approvals_hash = match self { - Transaction::Deploy(deploy) => { - TransactionApprovalsHash::Deploy(deploy.compute_approvals_hash()?) - } - Transaction::V1(txn) => TransactionApprovalsHash::V1(txn.compute_approvals_hash()?), - }; - Ok(approvals_hash) - } - - /// Returns the computed `TransactionId` uniquely identifying this transaction and its - /// approvals. 
- pub fn compute_id(&self) -> TransactionId { - match self { - Transaction::Deploy(deploy) => { - let deploy_hash = *deploy.hash(); - let approvals_hash = deploy.compute_approvals_hash().unwrap_or_else(|error| { - error!(%error, "failed to serialize deploy approvals"); - DeployApprovalsHash::from(Digest::default()) - }); - TransactionId::new_deploy(deploy_hash, approvals_hash) - } - Transaction::V1(txn) => { - let txn_hash = *txn.hash(); - let approvals_hash = txn.compute_approvals_hash().unwrap_or_else(|error| { - error!(%error, "failed to serialize transaction approvals"); - TransactionV1ApprovalsHash::from(Digest::default()) - }); - TransactionId::new_v1(txn_hash, approvals_hash) - } - } - } - - /// Returns the address of the initiator of the transaction. - pub fn initiator_addr(&self) -> InitiatorAddr { - match self { - Transaction::Deploy(deploy) => InitiatorAddr::PublicKey(deploy.account().clone()), - Transaction::V1(txn) => txn.initiator_addr().clone(), - } - } - - /// Returns `true` if the transaction has expired. - pub fn expired(&self, current_instant: Timestamp) -> bool { - match self { - Transaction::Deploy(deploy) => deploy.expired(current_instant), - Transaction::V1(txn) => txn.expired(current_instant), - } - } - - /// Returns the timestamp of when the transaction expires, i.e. `self.timestamp + self.ttl`. - pub fn expires(&self) -> Timestamp { - match self { - Transaction::Deploy(deploy) => deploy.header().expires(), - Transaction::V1(txn) => txn.header().expires(), - } - } - - /// Returns the set of account hashes corresponding to the public keys of the approvals. - pub fn signers(&self) -> BTreeSet { - match self { - Transaction::Deploy(deploy) => deploy - .approvals() - .iter() - .map(|approval| approval.signer().to_account_hash()) - .collect(), - Transaction::V1(txn) => txn - .approvals() - .iter() - .map(|approval| approval.signer().to_account_hash()) - .collect(), - } - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &TRANSACTION - } - - /// Returns a random, valid but possibly expired transaction. 
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - Transaction::Deploy(Deploy::random_valid_native_transfer(rng)) - } else { - Transaction::V1(TransactionV1::random(rng)) - } - } -} - -impl From for Transaction { - fn from(deploy: Deploy) -> Self { - Self::Deploy(deploy) - } -} - -impl From for Transaction { - fn from(txn: TransactionV1) -> Self { - Self::V1(txn) - } -} - -impl ToBytes for Transaction { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - Transaction::Deploy(deploy) => { - DEPLOY_TAG.write_bytes(writer)?; - deploy.write_bytes(writer) - } - Transaction::V1(txn) => { - V1_TAG.write_bytes(writer)?; - txn.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - Transaction::Deploy(deploy) => deploy.serialized_length(), - Transaction::V1(txn) => txn.serialized_length(), - } - } -} - -impl FromBytes for Transaction { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (deploy, remainder) = Deploy::from_bytes(remainder)?; - Ok((Transaction::Deploy(deploy), remainder)) - } - V1_TAG => { - let (txn, remainder) = TransactionV1::from_bytes(remainder)?; - Ok((Transaction::V1(txn), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Display for Transaction { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Transaction::Deploy(deploy) => Display::fmt(deploy, formatter), - Transaction::V1(txn) => Display::fmt(txn, formatter), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn json_roundtrip() { - let rng = &mut TestRng::new(); - - let transaction = Transaction::from(Deploy::random(rng)); - let json_string = serde_json::to_string_pretty(&transaction).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(transaction, decoded); - - let transaction = Transaction::from(TransactionV1::random(rng)); - let json_string = serde_json::to_string_pretty(&transaction).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(transaction, decoded); - } - - #[test] - fn bincode_roundtrip() { - let rng = &mut TestRng::new(); - - let transaction = Transaction::from(Deploy::random(rng)); - let serialized = bincode::serialize(&transaction).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(transaction, deserialized); - - let transaction = Transaction::from(TransactionV1::random(rng)); - let serialized = bincode::serialize(&transaction).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(transaction, deserialized); - } - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let transaction = Transaction::from(Deploy::random(rng)); - bytesrepr::test_serialization_roundtrip(&transaction); - - let transaction = Transaction::from(TransactionV1::random(rng)); - bytesrepr::test_serialization_roundtrip(&transaction); - } -} diff --git a/casper_types_ver_2_0/src/transaction/addressable_entity_identifier.rs b/casper_types_ver_2_0/src/transaction/addressable_entity_identifier.rs deleted file mode 100644 index bf588473..00000000 
--- a/casper_types_ver_2_0/src/transaction/addressable_entity_identifier.rs +++ /dev/null @@ -1,122 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::{ExecutableDeployItem, TransactionTarget}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - AddressableEntityHash, -}; - -const HASH_TAG: u8 = 0; -const NAME_TAG: u8 = 1; - -/// Identifier for the contract object within a [`TransactionTarget::Stored`] or an -/// [`ExecutableDeployItem`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars( - description = "Identifier for the contract object within a `Stored` transaction target \ - or an `ExecutableDeployItem`." - ) -)] -#[serde(deny_unknown_fields)] -pub enum AddressableEntityIdentifier { - /// The hash identifying the addressable entity. - Hash(AddressableEntityHash), - /// The name identifying the addressable entity. - Name(String), -} - -impl AddressableEntityIdentifier { - /// Returns a random `AddressableEntityIdentifier`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - AddressableEntityIdentifier::Hash(AddressableEntityHash::new(rng.gen())) - } else { - AddressableEntityIdentifier::Name(rng.random_string(1..21)) - } - } -} - -impl Display for AddressableEntityIdentifier { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - AddressableEntityIdentifier::Hash(hash) => write!(formatter, "entity-hash({})", hash), - AddressableEntityIdentifier::Name(name) => write!(formatter, "entity-name({})", name), - } - } -} - -impl ToBytes for AddressableEntityIdentifier { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - AddressableEntityIdentifier::Hash(hash) => { - HASH_TAG.write_bytes(writer)?; - hash.write_bytes(writer) - } - AddressableEntityIdentifier::Name(name) => { - NAME_TAG.write_bytes(writer)?; - name.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - AddressableEntityIdentifier::Hash(hash) => hash.serialized_length(), - AddressableEntityIdentifier::Name(name) => name.serialized_length(), - } - } -} - -impl FromBytes for AddressableEntityIdentifier { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - HASH_TAG => { - let (hash, remainder) = AddressableEntityHash::from_bytes(remainder)?; - Ok((AddressableEntityIdentifier::Hash(hash), remainder)) - } - NAME_TAG => { - let (name, remainder) = String::from_bytes(remainder)?; - Ok((AddressableEntityIdentifier::Name(name), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - 
bytesrepr::test_serialization_roundtrip(&AddressableEntityIdentifier::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy.rs b/casper_types_ver_2_0/src/transaction/deploy.rs deleted file mode 100644 index d93bd489..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy.rs +++ /dev/null @@ -1,2007 +0,0 @@ -mod deploy_approval; -mod deploy_approvals_hash; -#[cfg(any(feature = "std", test))] -mod deploy_builder; -mod deploy_footprint; -mod deploy_hash; -mod deploy_header; -mod deploy_id; -mod error; -mod executable_deploy_item; -mod finalized_deploy_approvals; - -use alloc::{collections::BTreeSet, vec::Vec}; -use core::{ - cmp, - fmt::{self, Debug, Display, Formatter}, - hash, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -pub use finalized_deploy_approvals::FinalizedDeployApprovals; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(any(feature = "std", test))] -use { - super::{InitiatorAddr, InitiatorAddrAndSecretKey}, - itertools::Itertools, - serde::{Deserialize, Serialize}, -}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use { - crate::{ - bytesrepr::Bytes, - system::auction::{ - ARG_AMOUNT as ARG_AUCTION_AMOUNT, ARG_DELEGATOR, ARG_NEW_VALIDATOR, - ARG_PUBLIC_KEY as ARG_AUCTION_PUBLIC_KEY, ARG_VALIDATOR, METHOD_DELEGATE, - METHOD_REDELEGATE, METHOD_UNDELEGATE, METHOD_WITHDRAW_BID, - }, - AddressableEntityHash, - {system::mint::ARG_AMOUNT, TransactionConfig, U512}, - {testing::TestRng, DEFAULT_MAX_PAYMENT_MOTES, DEFAULT_MIN_TRANSFER_MOTES}, - }, - rand::{Rng, RngCore}, - tracing::{debug, warn}, -}; -#[cfg(feature = "json-schema")] -use {once_cell::sync::Lazy, schemars::JsonSchema}; - -#[cfg(any( - all(feature = "std", feature = "testing"), - feature = "json-schema", - test -))] -use crate::runtime_args; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::RuntimeArgs; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - crypto, Digest, DisplayIter, PublicKey, SecretKey, TimeDiff, Timestamp, -}; - -pub use deploy_approval::DeployApproval; -pub use deploy_approvals_hash::DeployApprovalsHash; -#[cfg(any(feature = "std", test))] -pub use deploy_builder::{DeployBuilder, DeployBuilderError}; -pub use deploy_footprint::DeployFootprint; -pub use deploy_hash::DeployHash; -pub use deploy_header::DeployHeader; -pub use deploy_id::DeployId; -pub use error::{ - DecodeFromJsonError as DeployDecodeFromJsonError, DeployConfigFailure, Error as DeployError, - ExcessiveSizeError as DeployExcessiveSizeError, -}; -pub use executable_deploy_item::{ - ExecutableDeployItem, ExecutableDeployItemIdentifier, TransferTarget, -}; - -#[cfg(feature = "json-schema")] -static DEPLOY: Lazy = Lazy::new(|| { - let payment_args = runtime_args! { - "amount" => 1000 - }; - let payment = ExecutableDeployItem::StoredContractByName { - name: String::from("casper-example"), - entry_point: String::from("example-entry-point"), - args: payment_args, - }; - let session_args = runtime_args! 
{ - "amount" => 1000 - }; - let session = ExecutableDeployItem::Transfer { args: session_args }; - let serialized_body = serialize_body(&payment, &session); - let body_hash = Digest::hash(serialized_body); - - let secret_key = SecretKey::example(); - let timestamp = *Timestamp::example(); - let header = DeployHeader::new( - PublicKey::from(secret_key), - timestamp, - TimeDiff::from_seconds(3_600), - 1, - body_hash, - vec![DeployHash::new(Digest::from([1u8; Digest::LENGTH]))], - String::from("casper-example"), - ); - let serialized_header = serialize_header(&header); - let hash = DeployHash::new(Digest::hash(serialized_header)); - - let mut approvals = BTreeSet::new(); - let approval = DeployApproval::create(&hash, secret_key); - approvals.insert(approval); - - Deploy { - hash, - header, - payment, - session, - approvals, - is_valid: OnceCell::new(), - } -}); - -/// A signed smart contract. -/// -/// To construct a new `Deploy`, use a [`DeployBuilder`]. -#[derive(Clone, Eq, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "A signed smart contract.") -)] -pub struct Deploy { - hash: DeployHash, - header: DeployHeader, - payment: ExecutableDeployItem, - session: ExecutableDeployItem, - approvals: BTreeSet, - #[cfg_attr(any(all(feature = "std", feature = "once_cell"), test), serde(skip))] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - is_valid: OnceCell>, -} - -impl Deploy { - /// Called by the `DeployBuilder` to construct a new `Deploy`. - #[cfg(any(feature = "std", test))] - #[allow(clippy::too_many_arguments)] - fn build( - timestamp: Timestamp, - ttl: TimeDiff, - gas_price: u64, - dependencies: Vec, - chain_name: String, - payment: ExecutableDeployItem, - session: ExecutableDeployItem, - initiator_addr_and_secret_key: InitiatorAddrAndSecretKey, - ) -> Deploy { - let serialized_body = serialize_body(&payment, &session); - let body_hash = Digest::hash(serialized_body); - - let account = match initiator_addr_and_secret_key.initiator_addr() { - InitiatorAddr::PublicKey(public_key) => public_key, - InitiatorAddr::AccountHash(_) | InitiatorAddr::EntityAddr(_) => unreachable!(), - }; - - let dependencies = dependencies.into_iter().unique().collect(); - let header = DeployHeader::new( - account, - timestamp, - ttl, - gas_price, - body_hash, - dependencies, - chain_name, - ); - let serialized_header = serialize_header(&header); - let hash = DeployHash::new(Digest::hash(serialized_header)); - - let mut deploy = Deploy { - hash, - header, - payment, - session, - approvals: BTreeSet::new(), - #[cfg(any(feature = "once_cell", test))] - is_valid: OnceCell::new(), - }; - - if let Some(secret_key) = initiator_addr_and_secret_key.secret_key() { - deploy.sign(secret_key); - } - deploy - } - - /// Returns the `DeployHash` identifying this `Deploy`. - pub fn hash(&self) -> &DeployHash { - &self.hash - } - - /// Returns the public key of the account providing the context in which to run the `Deploy`. - pub fn account(&self) -> &PublicKey { - self.header.account() - } - - /// Returns the creation timestamp of the `Deploy`. - pub fn timestamp(&self) -> Timestamp { - self.header.timestamp() - } - - /// Returns the duration after the creation timestamp for which the `Deploy` will stay valid. 
- /// - /// After this duration has ended, the `Deploy` will be considered expired. - pub fn ttl(&self) -> TimeDiff { - self.header.ttl() - } - - /// Returns `true` if the `Deploy` has expired. - pub fn expired(&self, current_instant: Timestamp) -> bool { - self.header.expired(current_instant) - } - - /// Returns the price per gas unit for the `Deploy`. - pub fn gas_price(&self) -> u64 { - self.header.gas_price() - } - - /// Returns the hash of the body (i.e. the Wasm code) of the `Deploy`. - pub fn body_hash(&self) -> &Digest { - self.header.body_hash() - } - - /// Returns the name of the chain the `Deploy` should be executed on. - pub fn chain_name(&self) -> &str { - self.header.chain_name() - } - - /// Returns a reference to the `DeployHeader` of this `Deploy`. - pub fn header(&self) -> &DeployHeader { - &self.header - } - - /// Consumes `self`, returning the `DeployHeader` of this `Deploy`. - pub fn take_header(self) -> DeployHeader { - self.header - } - - /// Returns the `ExecutableDeployItem` for payment code. - pub fn payment(&self) -> &ExecutableDeployItem { - &self.payment - } - - /// Returns the `ExecutableDeployItem` for session code. - pub fn session(&self) -> &ExecutableDeployItem { - &self.session - } - - /// Returns the `Approval`s for this deploy. - pub fn approvals(&self) -> &BTreeSet { - &self.approvals - } - - /// Adds a signature of this `Deploy`'s hash to its approvals. - pub fn sign(&mut self, secret_key: &SecretKey) { - let approval = DeployApproval::create(&self.hash, secret_key); - self.approvals.insert(approval); - } - - /// Returns the `ApprovalsHash` of this `Deploy`'s approvals. - pub fn compute_approvals_hash(&self) -> Result { - DeployApprovalsHash::compute(&self.approvals) - } - - /// Returns `true` if the serialized size of the deploy is not greater than - /// `max_transaction_size`. - #[cfg(any(feature = "std", test))] - pub fn is_valid_size(&self, max_transaction_size: u32) -> Result<(), DeployExcessiveSizeError> { - let deploy_size = self.serialized_length(); - if deploy_size > max_transaction_size as usize { - return Err(DeployExcessiveSizeError { - max_transaction_size, - actual_deploy_size: deploy_size, - }); - } - Ok(()) - } - - /// Returns `Ok` if and only if this `Deploy`'s body hashes to the value of `body_hash()`, and - /// if this `Deploy`'s header hashes to the value claimed as the deploy hash. 
- pub fn has_valid_hash(&self) -> Result<(), DeployConfigFailure> { - let serialized_body = serialize_body(&self.payment, &self.session); - let body_hash = Digest::hash(serialized_body); - if body_hash != *self.header.body_hash() { - #[cfg(any(all(feature = "std", feature = "testing"), test))] - warn!(?self, ?body_hash, "invalid deploy body hash"); - return Err(DeployConfigFailure::InvalidBodyHash); - } - - let serialized_header = serialize_header(&self.header); - let hash = DeployHash::new(Digest::hash(serialized_header)); - if hash != self.hash { - #[cfg(any(all(feature = "std", feature = "testing"), test))] - warn!(?self, ?hash, "invalid deploy hash"); - return Err(DeployConfigFailure::InvalidDeployHash); - } - Ok(()) - } - - /// Returns `Ok` if and only if: - /// * the deploy hash is correct (should be the hash of the header), and - /// * the body hash is correct (should be the hash of the body), and - /// * approvals are non empty, and - /// * all approvals are valid signatures of the deploy hash - pub fn is_valid(&self) -> Result<(), DeployConfigFailure> { - #[cfg(any(feature = "once_cell", test))] - return self.is_valid.get_or_init(|| validate_deploy(self)).clone(); - - #[cfg(not(any(feature = "once_cell", test)))] - validate_deploy(self) - } - - /// Returns the `DeployFootprint`. - pub fn footprint(&self) -> Result { - let header = self.header().clone(); - let gas_estimate = match self.payment().payment_amount(header.gas_price()) { - Some(gas) => gas, - None => { - return Err(DeployError::InvalidPayment); - } - }; - let size_estimate = self.serialized_length(); - let is_transfer = self.session.is_transfer(); - Ok(DeployFootprint { - header, - gas_estimate, - size_estimate, - is_transfer, - }) - } - - /// Returns `Ok` if and only if: - /// * the chain_name is correct, - /// * the configured parameters are complied with at the given timestamp - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn is_config_compliant( - &self, - chain_name: &str, - config: &TransactionConfig, - max_associated_keys: u32, - timestamp_leeway: TimeDiff, - at: Timestamp, - ) -> Result<(), DeployConfigFailure> { - self.is_valid_size(config.max_transaction_size)?; - - let header = self.header(); - if header.chain_name() != chain_name { - debug!( - deploy_hash = %self.hash(), - deploy_header = %header, - chain_name = %header.chain_name(), - "invalid chain identifier" - ); - return Err(DeployConfigFailure::InvalidChainName { - expected: chain_name.to_string(), - got: header.chain_name().to_string(), - }); - } - - header.is_valid(config, timestamp_leeway, at, &self.hash)?; - - if self.approvals.len() > max_associated_keys as usize { - debug!( - deploy_hash = %self.hash(), - number_of_associated_keys = %self.approvals.len(), - max_associated_keys = %max_associated_keys, - "number of associated keys exceeds the maximum limit" - ); - return Err(DeployConfigFailure::ExcessiveApprovals { - got: self.approvals.len() as u32, - max_associated_keys, - }); - } - - // Transfers have a fixed cost and won't blow the block gas limit. - // Other deploys can, therefore, statically check the payment amount - // associated with the deploy. 
- if !self.session().is_transfer() { - let value = self - .payment() - .args() - .get(ARG_AMOUNT) - .ok_or(DeployConfigFailure::MissingPaymentAmount)?; - let payment_amount = value - .clone() - .into_t::() - .map_err(|_| DeployConfigFailure::FailedToParsePaymentAmount)?; - if payment_amount > U512::from(config.block_gas_limit) { - debug!( - amount = %payment_amount, - block_gas_limit = %config.block_gas_limit, - "payment amount exceeds block gas limit" - ); - return Err(DeployConfigFailure::ExceededBlockGasLimit { - block_gas_limit: config.block_gas_limit, - got: Box::new(payment_amount), - }); - } - } - - let payment_args_length = self.payment().args().serialized_length(); - if payment_args_length > config.deploy_config.payment_args_max_length as usize { - debug!( - payment_args_length, - payment_args_max_length = config.deploy_config.payment_args_max_length, - "payment args excessive" - ); - return Err(DeployConfigFailure::ExcessivePaymentArgsLength { - max_length: config.deploy_config.payment_args_max_length as usize, - got: payment_args_length, - }); - } - - let session_args_length = self.session().args().serialized_length(); - if session_args_length > config.deploy_config.session_args_max_length as usize { - debug!( - session_args_length, - session_args_max_length = config.deploy_config.session_args_max_length, - "session args excessive" - ); - return Err(DeployConfigFailure::ExcessiveSessionArgsLength { - max_length: config.deploy_config.session_args_max_length as usize, - got: session_args_length, - }); - } - - if self.session().is_transfer() { - let item = self.session().clone(); - let attempted = item - .args() - .get(ARG_AMOUNT) - .ok_or_else(|| { - debug!("missing transfer 'amount' runtime argument"); - DeployConfigFailure::MissingTransferAmount - })? - .clone() - .into_t::() - .map_err(|_| { - debug!("failed to parse transfer 'amount' runtime argument as a U512"); - DeployConfigFailure::FailedToParseTransferAmount - })?; - let minimum = U512::from(config.native_transfer_minimum_motes); - if attempted < minimum { - debug!( - minimum = %config.native_transfer_minimum_motes, - amount = %attempted, - "insufficient transfer amount" - ); - return Err(DeployConfigFailure::InsufficientTransferAmount { - minimum: Box::new(minimum), - attempted: Box::new(attempted), - }); - } - } - - Ok(()) - } - - // This method is not intended to be used by third party crates. - // - // It is required to allow finalized approvals to be injected after reading a `Deploy` from - // storage. - #[doc(hidden)] - pub fn with_approvals(mut self, approvals: BTreeSet) -> Self { - self.approvals = approvals; - self - } - - // This method is not intended to be used by third party crates. - #[doc(hidden)] - #[cfg(feature = "json-schema")] - pub fn example() -> &'static Self { - &DEPLOY - } - - /// Constructs a new signed `Deploy`. 
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - #[allow(clippy::too_many_arguments)] - pub fn new( - timestamp: Timestamp, - ttl: TimeDiff, - gas_price: u64, - dependencies: Vec, - chain_name: String, - payment: ExecutableDeployItem, - session: ExecutableDeployItem, - secret_key: &SecretKey, - account: Option, - ) -> Deploy { - let account_and_secret_key = match account { - Some(account) => InitiatorAddrAndSecretKey::Both { - initiator_addr: InitiatorAddr::PublicKey(account), - secret_key, - }, - None => InitiatorAddrAndSecretKey::SecretKey(secret_key), - }; - - Deploy::build( - timestamp, - ttl, - gas_price, - dependencies, - chain_name, - payment, - session, - account_and_secret_key, - ) - } - - /// Returns a random `Deploy`. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random(rng: &mut TestRng) -> Self { - let timestamp = Timestamp::random(rng); - let ttl = TimeDiff::from_seconds(rng.gen_range(60..300)); - Deploy::random_with_timestamp_and_ttl(rng, timestamp, ttl) - } - - /// Returns a random `Deploy` but using the specified `timestamp` and `ttl`. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_timestamp_and_ttl( - rng: &mut TestRng, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let gas_price = rng.gen_range(1..100); - - let dependencies = vec![ - DeployHash::new(Digest::hash(rng.next_u64().to_le_bytes())), - DeployHash::new(Digest::hash(rng.next_u64().to_le_bytes())), - DeployHash::new(Digest::hash(rng.next_u64().to_le_bytes())), - ]; - let chain_name = String::from("casper-example"); - - // We need "amount" in order to be able to get correct info via `deploy_info()`. - let payment_args = runtime_args! { - "amount" => U512::from(DEFAULT_MAX_PAYMENT_MOTES), - }; - let payment = ExecutableDeployItem::StoredContractByName { - name: String::from("casper-example"), - entry_point: String::from("example-entry-point"), - args: payment_args, - }; - - let session = rng.gen(); - - let secret_key = SecretKey::random(rng); - - Deploy::new( - timestamp, - ttl, - gas_price, - dependencies, - chain_name, - payment, - session, - &secret_key, - None, - ) - } - - /// Turns `self` into an invalid `Deploy` by clearing the `chain_name`, invalidating the deploy - /// hash. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn invalidate(&mut self) { - self.header.invalidate(); - } - - /// Returns a random `Deploy` for a native transfer. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_valid_native_transfer(rng: &mut TestRng) -> Self { - let timestamp = Timestamp::now(); - let ttl = TimeDiff::from_seconds(rng.gen_range(60..300)); - Self::random_valid_native_transfer_with_timestamp_and_ttl(rng, timestamp, ttl) - } - - /// Returns a random `Deploy` for a native transfer with timestamp and ttl. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_valid_native_transfer_with_timestamp_and_ttl( - rng: &mut TestRng, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let deploy = Self::random_with_timestamp_and_ttl(rng, timestamp, ttl); - let transfer_args = runtime_args! { - "amount" => U512::from(DEFAULT_MIN_TRANSFER_MOTES), - "source" => PublicKey::random(rng).to_account_hash(), - "target" => PublicKey::random(rng).to_account_hash(), - }; - let payment_args = runtime_args! 
{ - "amount" => U512::from(10), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: payment_args, - }; - let secret_key = SecretKey::random(rng); - Deploy::new( - timestamp, - ttl, - deploy.header.gas_price(), - deploy.header.dependencies().clone(), - deploy.header.chain_name().to_string(), - payment, - session, - &secret_key, - None, - ) - } - - /// Returns a random `Deploy` for a native transfer with no dependencies. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_valid_native_transfer_without_deps(rng: &mut TestRng) -> Self { - let deploy = Self::random(rng); - let transfer_args = runtime_args! { - "amount" => U512::from(DEFAULT_MIN_TRANSFER_MOTES), - "source" => PublicKey::random(rng).to_account_hash(), - "target" => PublicKey::random(rng).to_account_hash(), - }; - let payment_args = runtime_args! { - "amount" => U512::from(10), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: payment_args, - }; - let secret_key = SecretKey::random(rng); - Deploy::new( - Timestamp::now(), - deploy.header.ttl(), - deploy.header.gas_price(), - vec![], - deploy.header.chain_name().to_string(), - payment, - session, - &secret_key, - None, - ) - } - - /// Returns a random invalid `Deploy` without a payment amount specified. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_without_payment_amount(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: RuntimeArgs::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random invalid `Deploy` with an invalid value for the payment amount. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_mangled_payment_amount(rng: &mut TestRng) -> Self { - let payment_args = runtime_args! { - "amount" => "invalid-argument" - }; - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: payment_args, - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random `Deploy` with custom payment specified as a stored contract by name. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_valid_custom_payment_contract_by_name(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredContractByName { - name: "Test".to_string(), - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random invalid `Deploy` with custom payment specified as a stored contract by - /// hash, but missing the runtime args. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_payment_contract_by_hash(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredContractByHash { - hash: [19; 32].into(), - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random invalid `Deploy` with custom payment specified as a stored contract by - /// hash, but calling an invalid entry point. 
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_entry_point_in_payment_contract(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredContractByHash { - hash: [19; 32].into(), - entry_point: "non-existent-entry-point".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random `Deploy` with custom payment specified as a stored versioned contract by - /// name. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_valid_custom_payment_package_by_name(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredVersionedContractByName { - name: "Test".to_string(), - version: None, - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random invalid `Deploy` with custom payment specified as a stored versioned - /// contract by hash, but missing the runtime args. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_payment_package_by_hash(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredVersionedContractByHash { - hash: Default::default(), - version: None, - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random invalid `Deploy` with custom payment specified as a stored versioned - /// contract by hash, but calling an invalid entry point. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_nonexistent_contract_version_in_payment_package(rng: &mut TestRng) -> Self { - let payment = ExecutableDeployItem::StoredVersionedContractByHash { - hash: [19; 32].into(), - version: Some(6u32), - entry_point: "non-existent-entry-point".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_payment(rng, payment) - } - - /// Returns a random `Deploy` with custom session specified as a stored contract by name. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_valid_session_contract_by_name(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredContractByName { - name: "Test".to_string(), - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with custom session specified as a stored contract by - /// hash, but missing the runtime args. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_session_contract_by_hash(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredContractByHash { - hash: Default::default(), - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with custom session specified as a stored contract by - /// hash, but calling an invalid entry point. 
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_entry_point_in_session_contract(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredContractByHash { - hash: [19; 32].into(), - entry_point: "non-existent-entry-point".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random `Deploy` with custom session specified as a stored versioned contract by - /// name. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_valid_session_package_by_name(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredVersionedContractByName { - name: "Test".to_string(), - version: None, - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with custom session specified as a stored versioned - /// contract by hash, but missing the runtime args. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_missing_session_package_by_hash(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredVersionedContractByHash { - hash: Default::default(), - version: None, - entry_point: "call".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with custom session specified as a stored versioned - /// contract by hash, but calling an invalid entry point. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_nonexistent_contract_version_in_session_package(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::StoredVersionedContractByHash { - hash: [19; 32].into(), - version: Some(6u32), - entry_point: "non-existent-entry-point".to_string(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid transfer `Deploy` with the "target" runtime arg missing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_without_transfer_target(rng: &mut TestRng) -> Self { - let transfer_args = runtime_args! { - "amount" => U512::from(DEFAULT_MIN_TRANSFER_MOTES), - "source" => PublicKey::random(rng).to_account_hash(), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid transfer `Deploy` with the "amount" runtime arg missing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_without_transfer_amount(rng: &mut TestRng) -> Self { - let transfer_args = runtime_args! { - "source" => PublicKey::random(rng).to_account_hash(), - "target" => PublicKey::random(rng).to_account_hash(), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid transfer `Deploy` with an invalid "amount" runtime arg. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_mangled_transfer_amount(rng: &mut TestRng) -> Self { - let transfer_args = runtime_args! 
{ - "amount" => "mangled-transfer-amount", - "source" => PublicKey::random(rng).to_account_hash(), - "target" => PublicKey::random(rng).to_account_hash(), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with empty session bytes. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_empty_session_module_bytes(rng: &mut TestRng) -> Self { - let session = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: Default::default(), - }; - Self::random_transfer_with_session(rng, session) - } - - /// Returns a random invalid `Deploy` with an expired TTL. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_expired_deploy(rng: &mut TestRng) -> Self { - let deploy = Self::random_valid_native_transfer(rng); - let secret_key = SecretKey::random(rng); - - Deploy::new( - Timestamp::zero(), - TimeDiff::from_seconds(1u32), - deploy.header.gas_price(), - deploy.header.dependencies().clone(), - deploy.header.chain_name().to_string(), - deploy.payment, - deploy.session, - &secret_key, - None, - ) - } - - /// Returns a random `Deploy` with native transfer as payment code. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random_with_native_transfer_in_payment_logic(rng: &mut TestRng) -> Self { - let transfer_args = runtime_args! { - "amount" => U512::from(DEFAULT_MIN_TRANSFER_MOTES), - "source" => PublicKey::random(rng).to_account_hash(), - "target" => PublicKey::random(rng).to_account_hash(), - }; - let payment = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - Self::random_transfer_with_payment(rng, payment) - } - - #[cfg(any(all(feature = "std", feature = "testing"), test))] - fn random_transfer_with_payment(rng: &mut TestRng, payment: ExecutableDeployItem) -> Self { - let deploy = Self::random_valid_native_transfer(rng); - let secret_key = SecretKey::random(rng); - - Deploy::new( - deploy.header.timestamp(), - deploy.header.ttl(), - deploy.header.gas_price(), - deploy.header.dependencies().clone(), - deploy.header.chain_name().to_string(), - payment, - deploy.session, - &secret_key, - None, - ) - } - - #[cfg(any(all(feature = "std", feature = "testing"), test))] - fn random_transfer_with_session(rng: &mut TestRng, session: ExecutableDeployItem) -> Self { - let deploy = Self::random_valid_native_transfer(rng); - let secret_key = SecretKey::random(rng); - - Deploy::new( - deploy.header.timestamp(), - deploy.header.ttl(), - deploy.header.gas_price(), - deploy.header.dependencies().clone(), - deploy.header.chain_name().to_string(), - deploy.payment, - session, - &secret_key, - None, - ) - } - - /// Creates a withdraw bid deploy, for testing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn withdraw_bid( - chain_name: String, - auction_contract_hash: AddressableEntityHash, - public_key: PublicKey, - amount: U512, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { ARG_AMOUNT => U512::from(3_000_000_000_u64) }, - }; - let args = runtime_args! 
{ - ARG_AUCTION_AMOUNT => amount, - ARG_AUCTION_PUBLIC_KEY => public_key.clone(), - }; - let session = ExecutableDeployItem::StoredContractByHash { - hash: auction_contract_hash, - entry_point: METHOD_WITHDRAW_BID.to_string(), - args, - }; - - Deploy::build( - timestamp, - ttl, - 1, - vec![], - chain_name, - payment, - session, - InitiatorAddrAndSecretKey::InitiatorAddr(InitiatorAddr::PublicKey(public_key)), - ) - } - - /// Creates a delegate deploy, for testing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn delegate( - chain_name: String, - auction_contract_hash: AddressableEntityHash, - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - amount: U512, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { ARG_AMOUNT => U512::from(3_000_000_000_u64) }, - }; - let args = runtime_args! { - ARG_DELEGATOR => delegator_public_key.clone(), - ARG_VALIDATOR => validator_public_key, - ARG_AUCTION_AMOUNT => amount, - }; - let session = ExecutableDeployItem::StoredContractByHash { - hash: auction_contract_hash, - entry_point: METHOD_DELEGATE.to_string(), - args, - }; - - Deploy::build( - timestamp, - ttl, - 1, - vec![], - chain_name, - payment, - session, - InitiatorAddrAndSecretKey::InitiatorAddr(InitiatorAddr::PublicKey( - delegator_public_key, - )), - ) - } - - /// Creates an undelegate deploy, for testing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn undelegate( - chain_name: String, - auction_contract_hash: AddressableEntityHash, - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - amount: U512, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { ARG_AMOUNT => U512::from(3_000_000_000_u64) }, - }; - let args = runtime_args! { - ARG_DELEGATOR => delegator_public_key.clone(), - ARG_VALIDATOR => validator_public_key, - ARG_AUCTION_AMOUNT => amount, - }; - let session = ExecutableDeployItem::StoredContractByHash { - hash: auction_contract_hash, - entry_point: METHOD_UNDELEGATE.to_string(), - args, - }; - - Deploy::build( - timestamp, - ttl, - 1, - vec![], - chain_name, - payment, - session, - InitiatorAddrAndSecretKey::InitiatorAddr(InitiatorAddr::PublicKey( - delegator_public_key, - )), - ) - } - - /// Creates an redelegate deploy, for testing. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - #[allow(clippy::too_many_arguments)] - pub fn redelegate( - chain_name: String, - auction_contract_hash: AddressableEntityHash, - validator_public_key: PublicKey, - delegator_public_key: PublicKey, - redelegate_validator_public_key: PublicKey, - amount: U512, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { ARG_AMOUNT => U512::from(3_000_000_000_u64) }, - }; - let args = runtime_args! 
{ - ARG_DELEGATOR => delegator_public_key.clone(), - ARG_VALIDATOR => validator_public_key, - ARG_NEW_VALIDATOR => redelegate_validator_public_key, - ARG_AUCTION_AMOUNT => amount, - }; - let session = ExecutableDeployItem::StoredContractByHash { - hash: auction_contract_hash, - entry_point: METHOD_REDELEGATE.to_string(), - args, - }; - - Deploy::build( - timestamp, - ttl, - 1, - vec![], - chain_name, - payment, - session, - InitiatorAddrAndSecretKey::InitiatorAddr(InitiatorAddr::PublicKey( - delegator_public_key, - )), - ) - } -} - -impl hash::Hash for Deploy { - fn hash(&self, state: &mut H) { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let Deploy { - hash, - header, - payment, - session, - approvals, - is_valid: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let Deploy { - hash, - header, - payment, - session, - approvals, - } = self; - hash.hash(state); - header.hash(state); - payment.hash(state); - session.hash(state); - approvals.hash(state); - } -} - -impl PartialEq for Deploy { - fn eq(&self, other: &Deploy) -> bool { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let Deploy { - hash, - header, - payment, - session, - approvals, - is_valid: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let Deploy { - hash, - header, - payment, - session, - approvals, - } = self; - *hash == other.hash - && *header == other.header - && *payment == other.payment - && *session == other.session - && *approvals == other.approvals - } -} - -impl Ord for Deploy { - fn cmp(&self, other: &Deploy) -> cmp::Ordering { - // Destructure to make sure we don't accidentally omit fields. - #[cfg(any(feature = "once_cell", test))] - let Deploy { - hash, - header, - payment, - session, - approvals, - is_valid: _, - } = self; - #[cfg(not(any(feature = "once_cell", test)))] - let Deploy { - hash, - header, - payment, - session, - approvals, - } = self; - hash.cmp(&other.hash) - .then_with(|| header.cmp(&other.header)) - .then_with(|| payment.cmp(&other.payment)) - .then_with(|| session.cmp(&other.session)) - .then_with(|| approvals.cmp(&other.approvals)) - } -} - -impl PartialOrd for Deploy { - fn partial_cmp(&self, other: &Deploy) -> Option { - Some(self.cmp(other)) - } -} - -impl ToBytes for Deploy { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.header.write_bytes(writer)?; - self.hash.write_bytes(writer)?; - self.payment.write_bytes(writer)?; - self.session.write_bytes(writer)?; - self.approvals.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.header.serialized_length() - + self.hash.serialized_length() - + self.payment.serialized_length() - + self.session.serialized_length() - + self.approvals.serialized_length() - } -} - -impl FromBytes for Deploy { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (header, remainder) = DeployHeader::from_bytes(bytes)?; - let (hash, remainder) = DeployHash::from_bytes(remainder)?; - let (payment, remainder) = ExecutableDeployItem::from_bytes(remainder)?; - let (session, remainder) = ExecutableDeployItem::from_bytes(remainder)?; - let (approvals, remainder) = BTreeSet::::from_bytes(remainder)?; - let maybe_valid_deploy = Deploy { - header, - hash, - payment, - session, - 
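The hand-written Hash, PartialEq and Ord impls for Deploy above all begin by destructuring the struct, as their comment notes, so that adding a field to Deploy turns into a compile error in each impl rather than a field that is silently ignored. A minimal, self-contained sketch of that idiom, using a hypothetical Point type rather than anything from this crate:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct Point {
    x: i32,
    y: i32,
}

impl Hash for Point {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Destructure so a newly added field cannot be forgotten here:
        // adding `z: i32` to `Point` makes this pattern stop compiling.
        let Point { x, y } = self;
        x.hash(state);
        y.hash(state);
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    Point { x: 1, y: 2 }.hash(&mut hasher);
    println!("hash = {}", hasher.finish());
}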
approvals, - #[cfg(any(feature = "once_cell", test))] - is_valid: OnceCell::new(), - }; - Ok((maybe_valid_deploy, remainder)) - } -} - -impl Display for Deploy { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "deploy[{}, {}, payment_code: {}, session_code: {}, approvals: {}]", - self.hash, - self.header, - self.payment, - self.session, - DisplayIter::new(self.approvals.iter()) - ) - } -} - -fn serialize_header(header: &DeployHeader) -> Vec { - header - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize deploy header: {}", error)) -} - -fn serialize_body(payment: &ExecutableDeployItem, session: &ExecutableDeployItem) -> Vec { - let mut buffer = Vec::with_capacity(payment.serialized_length() + session.serialized_length()); - payment - .write_bytes(&mut buffer) - .unwrap_or_else(|error| panic!("should serialize payment code: {}", error)); - session - .write_bytes(&mut buffer) - .unwrap_or_else(|error| panic!("should serialize session code: {}", error)); - buffer -} - -/// Computationally expensive validity check for a given deploy instance, including asymmetric_key -/// signing verification. -fn validate_deploy(deploy: &Deploy) -> Result<(), DeployConfigFailure> { - if deploy.approvals.is_empty() { - #[cfg(any(all(feature = "std", feature = "testing"), test))] - warn!(?deploy, "deploy has no approvals"); - return Err(DeployConfigFailure::EmptyApprovals); - } - - deploy.has_valid_hash()?; - - for (index, approval) in deploy.approvals.iter().enumerate() { - if let Err(error) = crypto::verify(deploy.hash, approval.signature(), approval.signer()) { - #[cfg(any(all(feature = "std", feature = "testing"), test))] - warn!(?deploy, "failed to verify approval {}: {}", index, error); - return Err(DeployConfigFailure::InvalidApproval { index, error }); - } - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use std::{iter, time::Duration}; - - use super::*; - use crate::CLValue; - - const DEFAULT_MAX_ASSOCIATED_KEYS: u32 = 100; - - #[test] - fn json_roundtrip() { - let mut rng = TestRng::new(); - let deploy = Deploy::random(&mut rng); - let json_string = serde_json::to_string_pretty(&deploy).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(deploy, decoded); - } - - #[test] - fn bincode_roundtrip() { - let mut rng = TestRng::new(); - let deploy = Deploy::random(&mut rng); - let serialized = bincode::serialize(&deploy).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(deploy, deserialized); - } - - #[test] - fn bytesrepr_roundtrip() { - let mut rng = TestRng::new(); - let deploy = Deploy::random(&mut rng); - bytesrepr::test_serialization_roundtrip(deploy.header()); - bytesrepr::test_serialization_roundtrip(&deploy); - } - - fn create_deploy( - rng: &mut TestRng, - ttl: TimeDiff, - dependency_count: usize, - chain_name: &str, - ) -> Deploy { - let secret_key = SecretKey::random(rng); - let dependencies = iter::repeat_with(|| DeployHash::random(rng)) - .take(dependency_count) - .collect(); - let transfer_args = { - let mut transfer_args = RuntimeArgs::new(); - let value = CLValue::from_t(U512::from(DEFAULT_MIN_TRANSFER_MOTES)) - .expect("should create CLValue"); - transfer_args.insert_cl_value("amount", value); - transfer_args - }; - Deploy::new( - Timestamp::now(), - ttl, - 1, - dependencies, - chain_name.to_string(), - ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: RuntimeArgs::new(), - }, - ExecutableDeployItem::Transfer { - args: transfer_args, - }, - 
&secret_key, - None, - ) - } - - #[test] - fn is_valid() { - let mut rng = TestRng::new(); - let deploy = create_deploy(&mut rng, TransactionConfig::default().max_ttl, 0, "net-1"); - assert_eq!( - deploy.is_valid.get(), - None, - "is valid should initially be None" - ); - deploy.is_valid().expect("should be valid"); - assert_eq!( - deploy.is_valid.get(), - Some(&Ok(())), - "is valid should be true" - ); - } - - fn check_is_not_valid(invalid_deploy: Deploy, expected_error: DeployConfigFailure) { - assert!( - invalid_deploy.is_valid.get().is_none(), - "is valid should initially be None" - ); - let actual_error = invalid_deploy.is_valid().unwrap_err(); - - // Ignore the `error_msg` field of `InvalidApproval` when comparing to expected error, as - // this makes the test too fragile. Otherwise expect the actual error should exactly match - // the expected error. - match expected_error { - DeployConfigFailure::InvalidApproval { - index: expected_index, - .. - } => match actual_error { - DeployConfigFailure::InvalidApproval { - index: actual_index, - .. - } => { - assert_eq!(actual_index, expected_index); - } - _ => panic!("expected {}, got: {}", expected_error, actual_error), - }, - _ => { - assert_eq!(actual_error, expected_error,); - } - } - - // The actual error should have been lazily initialized correctly. - assert_eq!( - invalid_deploy.is_valid.get(), - Some(&Err(actual_error)), - "is valid should now be Some" - ); - } - - #[test] - fn not_valid_due_to_invalid_body_hash() { - let mut rng = TestRng::new(); - let mut deploy = create_deploy(&mut rng, TransactionConfig::default().max_ttl, 0, "net-1"); - - deploy.session = ExecutableDeployItem::Transfer { - args: runtime_args! { - "amount" => 1 - }, - }; - check_is_not_valid(deploy, DeployConfigFailure::InvalidBodyHash); - } - - #[test] - fn not_valid_due_to_invalid_deploy_hash() { - let mut rng = TestRng::new(); - let mut deploy = create_deploy(&mut rng, TransactionConfig::default().max_ttl, 0, "net-1"); - - // deploy.header.gas_price = 2; - deploy.invalidate(); - check_is_not_valid(deploy, DeployConfigFailure::InvalidDeployHash); - } - - #[test] - fn not_valid_due_to_empty_approvals() { - let mut rng = TestRng::new(); - let mut deploy = create_deploy(&mut rng, TransactionConfig::default().max_ttl, 0, "net-1"); - deploy.approvals = BTreeSet::new(); - assert!(deploy.approvals.is_empty()); - check_is_not_valid(deploy, DeployConfigFailure::EmptyApprovals) - } - - #[test] - fn not_valid_due_to_invalid_approval() { - let mut rng = TestRng::new(); - let mut deploy = create_deploy(&mut rng, TransactionConfig::default().max_ttl, 0, "net-1"); - - let deploy2 = Deploy::random(&mut rng); - - deploy.approvals.extend(deploy2.approvals.clone()); - // the expected index for the invalid approval will be the first index at which there is an - // approval coming from deploy2 - let expected_index = deploy - .approvals - .iter() - .enumerate() - .find(|(_, approval)| deploy2.approvals.contains(approval)) - .map(|(index, _)| index) - .unwrap(); - check_is_not_valid( - deploy, - DeployConfigFailure::InvalidApproval { - index: expected_index, - error: crypto::Error::SignatureError, // This field is ignored in the check. 
- }, - ); - } - - #[test] - fn is_acceptable() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - - let deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - let current_timestamp = deploy.header().timestamp(); - deploy - .is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp, - ) - .expect("should be acceptable"); - } - - #[test] - fn not_acceptable_due_to_invalid_chain_name() { - let mut rng = TestRng::new(); - let expected_chain_name = "net-1"; - let wrong_chain_name = "net-2".to_string(); - let config = TransactionConfig::default(); - - let deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - &wrong_chain_name, - ); - - let expected_error = DeployConfigFailure::InvalidChainName { - expected: expected_chain_name.to_string(), - got: wrong_chain_name, - }; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - expected_chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn not_acceptable_due_to_excessive_dependencies() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - - let dependency_count = usize::from(config.deploy_config.max_dependencies + 1); - - let deploy = create_deploy(&mut rng, config.max_ttl, dependency_count, chain_name); - - let expected_error = DeployConfigFailure::ExcessiveDependencies { - max_dependencies: config.deploy_config.max_dependencies, - got: dependency_count, - }; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn not_acceptable_due_to_excessive_ttl() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - - let ttl = config.max_ttl + TimeDiff::from(Duration::from_secs(1)); - - let deploy = create_deploy( - &mut rng, - ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - - let expected_error = DeployConfigFailure::ExcessiveTimeToLive { - max_ttl: config.max_ttl, - got: ttl, - }; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn not_acceptable_due_to_timestamp_in_future() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let leeway = TimeDiff::from_seconds(2); - - let deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - let current_timestamp = deploy.header.timestamp() - leeway - TimeDiff::from_seconds(1); - - let expected_error = DeployConfigFailure::TimestampInFuture { - validation_timestamp: current_timestamp, 
- timestamp_leeway: leeway, - got: deploy.header.timestamp(), - }; - - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - leeway, - current_timestamp - ), - Err(expected_error) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn acceptable_if_timestamp_slightly_in_future() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let leeway = TimeDiff::from_seconds(2); - - let deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - let current_timestamp = deploy.header.timestamp() - (leeway / 2); - deploy - .is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - leeway, - current_timestamp, - ) - .expect("should be acceptable"); - } - - #[test] - fn not_acceptable_due_to_missing_payment_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: RuntimeArgs::default(), - }; - - // Create an empty session object that is not transfer to ensure - // that the payment amount is checked. - let session = ExecutableDeployItem::StoredContractByName { - name: "".to_string(), - entry_point: "".to_string(), - args: Default::default(), - }; - - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - - deploy.payment = payment; - deploy.session = session; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(DeployConfigFailure::MissingPaymentAmount) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn not_acceptable_due_to_mangled_payment_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { - "amount" => "mangled-amount" - }, - }; - - // Create an empty session object that is not transfer to ensure - // that the payment amount is checked. - let session = ExecutableDeployItem::StoredContractByName { - name: "".to_string(), - entry_point: "".to_string(), - args: Default::default(), - }; - - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - - deploy.payment = payment; - deploy.session = session; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(DeployConfigFailure::FailedToParsePaymentAmount) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn not_acceptable_due_to_excessive_payment_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let amount = U512::from(config.block_gas_limit + 1); - - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! 
{ - "amount" => amount - }, - }; - - // Create an empty session object that is not transfer to ensure - // that the payment amount is checked. - let session = ExecutableDeployItem::StoredContractByName { - name: "".to_string(), - entry_point: "".to_string(), - args: Default::default(), - }; - - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies.into(), - chain_name, - ); - - deploy.payment = payment; - deploy.session = session; - - let expected_error = DeployConfigFailure::ExceededBlockGasLimit { - block_gas_limit: config.block_gas_limit, - got: Box::new(amount), - }; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - deploy.is_valid.get().is_none(), - "deploy should not have run expensive `is_valid` call" - ); - } - - #[test] - fn transfer_acceptable_regardless_of_excessive_payment_amount() { - let mut rng = TestRng::new(); - let secret_key = SecretKey::random(&mut rng); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let amount = U512::from(config.block_gas_limit + 1); - - let payment = ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { - "amount" => amount - }, - }; - - let transfer_args = { - let mut transfer_args = RuntimeArgs::new(); - let value = CLValue::from_t(U512::from(DEFAULT_MIN_TRANSFER_MOTES)) - .expect("should create CLValue"); - transfer_args.insert_cl_value("amount", value); - transfer_args - }; - - let deploy = Deploy::new( - Timestamp::now(), - config.max_ttl, - 1, - vec![], - chain_name.to_string(), - payment, - ExecutableDeployItem::Transfer { - args: transfer_args, - }, - &secret_key, - None, - ); - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - Ok(()), - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ) - ) - } - - #[test] - fn not_acceptable_due_to_excessive_approvals() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies as usize, - chain_name, - ); - // This test is to ensure a given limit is being checked. - // Therefore, set the limit to one less than the approvals in the deploy. 
- let max_associated_keys = (deploy.approvals.len() - 1) as u32; - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - Err(DeployConfigFailure::ExcessiveApprovals { - got: deploy.approvals.len() as u32, - max_associated_keys: (deploy.approvals.len() - 1) as u32 - }), - deploy.is_config_compliant( - chain_name, - &config, - max_associated_keys, - TimeDiff::default(), - current_timestamp - ) - ) - } - - #[test] - fn not_acceptable_due_to_missing_transfer_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies as usize, - chain_name, - ); - - let transfer_args = RuntimeArgs::default(); - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - deploy.session = session; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - Err(DeployConfigFailure::MissingTransferAmount), - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ) - ) - } - - #[test] - fn not_acceptable_due_to_mangled_transfer_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies as usize, - chain_name, - ); - - let transfer_args = runtime_args! { - "amount" => "mangled-amount", - "source" => PublicKey::random(&mut rng).to_account_hash(), - "target" => PublicKey::random(&mut rng).to_account_hash(), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - deploy.session = session; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - Err(DeployConfigFailure::FailedToParseTransferAmount), - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ) - ) - } - - #[test] - fn not_acceptable_due_to_insufficient_transfer_amount() { - let mut rng = TestRng::new(); - let chain_name = "net-1"; - let config = TransactionConfig::default(); - let mut deploy = create_deploy( - &mut rng, - config.max_ttl, - config.deploy_config.max_dependencies as usize, - chain_name, - ); - - let amount = config.native_transfer_minimum_motes - 1; - let insufficient_amount = U512::from(amount); - - let transfer_args = runtime_args! 
{ - "amount" => insufficient_amount, - "source" => PublicKey::random(&mut rng).to_account_hash(), - "target" => PublicKey::random(&mut rng).to_account_hash(), - }; - let session = ExecutableDeployItem::Transfer { - args: transfer_args, - }; - deploy.session = session; - - let current_timestamp = deploy.header().timestamp(); - assert_eq!( - Err(DeployConfigFailure::InsufficientTransferAmount { - minimum: Box::new(U512::from(config.native_transfer_minimum_motes)), - attempted: Box::new(insufficient_amount), - }), - deploy.is_config_compliant( - chain_name, - &config, - DEFAULT_MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ) - ) - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_approval.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_approval.rs deleted file mode 100644 index f01a74f7..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_approval.rs +++ /dev/null @@ -1,103 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::DeployHash; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - crypto, PublicKey, SecretKey, Signature, -}; - -/// A struct containing a signature of a deploy hash and the public key of the signer. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct DeployApproval { - signer: PublicKey, - signature: Signature, -} - -impl DeployApproval { - /// Creates an approval by signing the given deploy hash using the given secret key. - pub fn create(hash: &DeployHash, secret_key: &SecretKey) -> Self { - let signer = PublicKey::from(secret_key); - let signature = crypto::sign(hash, secret_key, &signer); - Self { signer, signature } - } - - /// Returns a new approval. - pub fn new(signer: PublicKey, signature: Signature) -> Self { - Self { signer, signature } - } - - /// Returns the public key of the approval's signer. - pub fn signer(&self) -> &PublicKey { - &self.signer - } - - /// Returns the approval signature. - pub fn signature(&self) -> &Signature { - &self.signature - } - - /// Returns a random `Approval`. 
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random(rng: &mut TestRng) -> Self { - Self { - signer: PublicKey::random(rng), - signature: Signature::ed25519([0; Signature::ED25519_LENGTH]).unwrap(), - } - } -} - -impl Display for DeployApproval { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "approval({})", self.signer) - } -} - -impl ToBytes for DeployApproval { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.signer.write_bytes(writer)?; - self.signature.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.signer.serialized_length() + self.signature.serialized_length() - } -} - -impl FromBytes for DeployApproval { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (signer, remainder) = PublicKey::from_bytes(bytes)?; - let (signature, remainder) = Signature::from_bytes(remainder)?; - let approval = DeployApproval { signer, signature }; - Ok((approval, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let approval = DeployApproval::random(rng); - bytesrepr::test_serialization_roundtrip(&approval); - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_approvals_hash.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_approvals_hash.rs deleted file mode 100644 index 6c098805..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_approvals_hash.rs +++ /dev/null @@ -1,111 +0,0 @@ -use alloc::{collections::BTreeSet, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -use super::DeployApproval; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, -}; - -/// The cryptographic hash of the bytesrepr-encoded set of approvals for a single deploy. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct DeployApprovalsHash(Digest); - -impl DeployApprovalsHash { - /// The number of bytes in a `DeployApprovalsHash` digest. - pub const LENGTH: usize = Digest::LENGTH; - - /// Constructs a new `DeployApprovalsHash` by bytesrepr-encoding `approvals` and creating a - /// [`Digest`] of this. - pub fn compute(approvals: &BTreeSet) -> Result { - let digest = Digest::hash(approvals.to_bytes()?); - Ok(DeployApprovalsHash(digest)) - } - - /// Returns the wrapped inner digest. - pub fn inner(&self) -> &Digest { - &self.0 - } - - /// Returns a new `DeployApprovalsHash` directly initialized with the provided bytes; no - /// hashing is done. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - DeployApprovalsHash(Digest::from_raw(raw_digest)) - } - - /// Returns a random `DeployApprovalsHash`. 
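As its compute constructor above shows, the removed DeployApprovalsHash is simply a Digest over the bytesrepr encoding of the ordered approval set, so the same approvals always hash to the same value regardless of insertion order. A rough std-only analogue of that idea, with a hypothetical (signer, signature) pair and DefaultHasher standing in for the crate's Digest:

use std::collections::hash_map::DefaultHasher;
use std::collections::BTreeSet;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for a (signer, signature) approval pair.
type Approval = (String, String);

fn approvals_hash(approvals: &BTreeSet<Approval>) -> u64 {
    // BTreeSet iteration is ordered, so the encoding -- and therefore the
    // resulting hash -- is deterministic for a given set of approvals.
    let mut hasher = DefaultHasher::new();
    for (signer, signature) in approvals {
        signer.hash(&mut hasher);
        signature.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    let mut approvals = BTreeSet::new();
    approvals.insert(("signer-1".to_string(), "sig-1".to_string()));
    approvals.insert(("signer-2".to_string(), "sig-2".to_string()));
    println!("approvals hash = {:x}", approvals_hash(&approvals));
}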
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = rng.gen::<[u8; Digest::LENGTH]>().into(); - DeployApprovalsHash(hash) - } -} - -impl From for Digest { - fn from(deploy_hash: DeployApprovalsHash) -> Self { - deploy_hash.0 - } -} - -impl From for DeployApprovalsHash { - fn from(digest: Digest) -> Self { - Self(digest) - } -} - -impl Display for DeployApprovalsHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "approvals-hash({})", self.0,) - } -} - -impl AsRef<[u8]> for DeployApprovalsHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for DeployApprovalsHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for DeployApprovalsHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Digest::from_bytes(bytes).map(|(inner, remainder)| (DeployApprovalsHash(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let hash = DeployApprovalsHash::random(rng); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_builder.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_builder.rs deleted file mode 100644 index 7c79e0de..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_builder.rs +++ /dev/null @@ -1,155 +0,0 @@ -mod error; - -use super::{ - super::{InitiatorAddr, InitiatorAddrAndSecretKey}, - Deploy, DeployHash, ExecutableDeployItem, TransferTarget, -}; -use crate::{PublicKey, SecretKey, TimeDiff, Timestamp, URef, U512}; -pub use error::DeployBuilderError; - -/// A builder for constructing a [`Deploy`]. -pub struct DeployBuilder<'a> { - account: Option, - secret_key: Option<&'a SecretKey>, - timestamp: Timestamp, - ttl: TimeDiff, - gas_price: u64, - dependencies: Vec, - chain_name: String, - payment: Option, - session: ExecutableDeployItem, -} - -impl<'a> DeployBuilder<'a> { - /// The default time-to-live for `Deploy`s, i.e. 30 minutes. - pub const DEFAULT_TTL: TimeDiff = TimeDiff::from_millis(30 * 60 * 1_000); - /// The default gas price for `Deploy`s, i.e. `1`. - pub const DEFAULT_GAS_PRICE: u64 = 1; - - /// Returns a new `DeployBuilder`. - /// - /// # Note - /// - /// Before calling [`build`](Self::build), you must ensure - /// * that an account is provided by either calling [`with_account`](Self::with_account) or - /// [`with_secret_key`](Self::with_secret_key) - /// * that payment code is provided by either calling - /// [`with_standard_payment`](Self::with_standard_payment) or - /// [`with_payment`](Self::with_payment) - pub fn new>(chain_name: C, session: ExecutableDeployItem) -> Self { - DeployBuilder { - account: None, - secret_key: None, - timestamp: Timestamp::now(), - ttl: Self::DEFAULT_TTL, - gas_price: Self::DEFAULT_GAS_PRICE, - dependencies: vec![], - chain_name: chain_name.into(), - payment: None, - session, - } - } - - /// Returns a new `DeployBuilder` with session code suitable for a transfer. - /// - /// If `maybe_source` is None, the account's main purse is used as the source of the transfer. 
- /// - /// # Note - /// - /// Before calling [`build`](Self::build), you must ensure - /// * that an account is provided by either calling [`with_account`](Self::with_account) or - /// [`with_secret_key`](Self::with_secret_key) - /// * that payment code is provided by either calling - /// [`with_standard_payment`](Self::with_standard_payment) or - /// [`with_payment`](Self::with_payment) - pub fn new_transfer, A: Into>( - chain_name: C, - amount: A, - maybe_source: Option, - target: TransferTarget, - maybe_transfer_id: Option, - ) -> Self { - let session = - ExecutableDeployItem::new_transfer(amount, maybe_source, target, maybe_transfer_id); - DeployBuilder::new(chain_name, session) - } - - /// Sets the `account` in the `Deploy`. - /// - /// If not provided, the public key derived from the secret key used in the `DeployBuilder` will - /// be used as the `account` in the `Deploy`. - pub fn with_account(mut self, account: PublicKey) -> Self { - self.account = Some(account); - self - } - - /// Sets the secret key used to sign the `Deploy` on calling [`build`](Self::build). - /// - /// If not provided, the `Deploy` can still be built, but will be unsigned and will be invalid - /// until subsequently signed. - pub fn with_secret_key(mut self, secret_key: &'a SecretKey) -> Self { - self.secret_key = Some(secret_key); - self - } - - /// Sets the `payment` in the `Deploy` to a standard payment with the given amount. - pub fn with_standard_payment>(mut self, amount: A) -> Self { - self.payment = Some(ExecutableDeployItem::new_standard_payment(amount)); - self - } - - /// Sets the `payment` in the `Deploy`. - pub fn with_payment(mut self, payment: ExecutableDeployItem) -> Self { - self.payment = Some(payment); - self - } - - /// Sets the `timestamp` in the `Deploy`. - /// - /// If not provided, the timestamp will be set to the time when the `DeployBuilder` was - /// constructed. - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.timestamp = timestamp; - self - } - - /// Sets the `ttl` (time-to-live) in the `Deploy`. - /// - /// If not provided, the ttl will be set to [`Self::DEFAULT_TTL`]. - pub fn with_ttl(mut self, ttl: TimeDiff) -> Self { - self.ttl = ttl; - self - } - - /// Returns the new `Deploy`, or an error if neither - /// [`with_standard_payment`](Self::with_standard_payment) nor - /// [`with_payment`](Self::with_payment) were previously called. 
- pub fn build(self) -> Result { - let initiator_addr_and_secret_key = match (self.account, self.secret_key) { - (Some(account), Some(secret_key)) => InitiatorAddrAndSecretKey::Both { - initiator_addr: InitiatorAddr::PublicKey(account), - secret_key, - }, - (Some(account), None) => { - InitiatorAddrAndSecretKey::InitiatorAddr(InitiatorAddr::PublicKey(account)) - } - (None, Some(secret_key)) => InitiatorAddrAndSecretKey::SecretKey(secret_key), - (None, None) => return Err(DeployBuilderError::DeployMissingSessionAccount), - }; - - let payment = self - .payment - .ok_or(DeployBuilderError::DeployMissingPaymentCode)?; - let deploy = Deploy::build( - self.timestamp, - self.ttl, - self.gas_price, - self.dependencies, - self.chain_name, - payment, - self.session, - initiator_addr_and_secret_key, - ); - Ok(deploy) - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_builder/error.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_builder/error.rs deleted file mode 100644 index 30ac6fa6..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_builder/error.rs +++ /dev/null @@ -1,44 +0,0 @@ -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(doc)] -use super::{Deploy, DeployBuilder}; - -/// Errors returned while building a [`Deploy`] using a [`DeployBuilder`]. -#[derive(Clone, Eq, PartialEq, Debug)] -#[non_exhaustive] -pub enum DeployBuilderError { - /// Failed to build `Deploy` due to missing session account. - /// - /// Call [`DeployBuilder::with_account`] or [`DeployBuilder::with_secret_key`] before - /// calling [`DeployBuilder::build`]. - DeployMissingSessionAccount, - /// Failed to build `Deploy` due to missing payment code. - /// - /// Call [`DeployBuilder::with_standard_payment`] or [`DeployBuilder::with_payment`] before - /// calling [`DeployBuilder::build`]. - DeployMissingPaymentCode, -} - -impl Display for DeployBuilderError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - DeployBuilderError::DeployMissingSessionAccount => { - write!( - formatter, - "deploy requires session account - use `with_account` or `with_secret_key`" - ) - } - DeployBuilderError::DeployMissingPaymentCode => { - write!( - formatter, - "deploy requires payment code - use `with_payment` or `with_standard_payment`" - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for DeployBuilderError {} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_footprint.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_footprint.rs deleted file mode 100644 index c45d23b8..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_footprint.rs +++ /dev/null @@ -1,28 +0,0 @@ -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Deploy; -use super::DeployHeader; -use crate::Gas; - -/// Information about how much block limit a [`Deploy`] will consume. -#[derive(Clone, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct DeployFootprint { - /// The header of the `Deploy`. - pub header: DeployHeader, - /// The estimated gas consumption of the `Deploy`. - pub gas_estimate: Gas, - /// The bytesrepr serialized length of the `Deploy`. - pub size_estimate: usize, - /// Whether the `Deploy` is a transfer or not. 
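Together, the builder and the DeployBuilderError variants removed above encode the two preconditions spelled out in the doc comments: a deploy needs an account (with_account or with_secret_key) and payment code (with_standard_payment or with_payment) before build will succeed. A hedged sketch of that call chain against the deleted API; the crate-root import paths are an assumption, and none of this compiles once these files are gone:

// Assumed re-exports from the pre-removal casper_types_ver_2_0 crate.
use casper_types_ver_2_0::bytesrepr::Bytes;
use casper_types_ver_2_0::{
    Deploy, DeployBuilder, DeployBuilderError, ExecutableDeployItem, RuntimeArgs, SecretKey,
    Timestamp, U512,
};

fn build_deploy(secret_key: &SecretKey) -> Result<Deploy, DeployBuilderError> {
    let session = ExecutableDeployItem::ModuleBytes {
        module_bytes: Bytes::new(),
        args: RuntimeArgs::new(),
    };
    DeployBuilder::new("casper-test", session)
        // The signing key also supplies the session account (see `with_secret_key`).
        .with_secret_key(secret_key)
        // Payment is mandatory; without it `build` returns DeployMissingPaymentCode.
        .with_standard_payment(U512::from(3_000_000_000_u64))
        .with_timestamp(Timestamp::now())
        .build()
}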
- pub is_transfer: bool, -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_hash.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_hash.rs deleted file mode 100644 index 0b38d6de..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_hash.rs +++ /dev/null @@ -1,116 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Deploy; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, -}; - -/// The cryptographic hash of a [`Deploy`]. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Hex-encoded deploy hash.") -)] -#[serde(deny_unknown_fields)] -pub struct DeployHash(Digest); - -impl DeployHash { - /// The number of bytes in a `DeployHash` digest. - pub const LENGTH: usize = Digest::LENGTH; - - /// Constructs a new `DeployHash`. - pub const fn new(hash: Digest) -> Self { - DeployHash(hash) - } - - /// Returns the wrapped inner digest. - pub fn inner(&self) -> &Digest { - &self.0 - } - - /// Returns a new `DeployHash` directly initialized with the provided bytes; no hashing is done. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - DeployHash(Digest::from_raw(raw_digest)) - } - - /// Returns a random `DeployHash`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = rng.gen::<[u8; Digest::LENGTH]>().into(); - DeployHash(hash) - } -} - -impl From for DeployHash { - fn from(digest: Digest) -> Self { - DeployHash(digest) - } -} - -impl From for Digest { - fn from(deploy_hash: DeployHash) -> Self { - deploy_hash.0 - } -} - -impl Display for DeployHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "deploy-hash({})", self.0,) - } -} - -impl AsRef<[u8]> for DeployHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for DeployHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for DeployHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Digest::from_bytes(bytes).map(|(inner, remainder)| (DeployHash(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let hash = DeployHash::random(rng); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_header.rs b/casper_types_ver_2_0/src/transaction/deploy/deploy_header.rs deleted file mode 100644 index 37bc7ea1..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_header.rs +++ /dev/null @@ -1,230 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; 
-#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; -#[cfg(any(feature = "std", test))] -use tracing::debug; - -#[cfg(doc)] -use super::Deploy; -use super::DeployHash; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, DisplayIter, PublicKey, TimeDiff, Timestamp, -}; -#[cfg(any(feature = "std", test))] -use crate::{DeployConfigFailure, TransactionConfig}; - -/// The header portion of a [`Deploy`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct DeployHeader { - account: PublicKey, - timestamp: Timestamp, - ttl: TimeDiff, - gas_price: u64, - body_hash: Digest, - dependencies: Vec, - chain_name: String, -} - -impl DeployHeader { - #[cfg(any(feature = "std", feature = "json-schema", test))] - pub(super) fn new( - account: PublicKey, - timestamp: Timestamp, - ttl: TimeDiff, - gas_price: u64, - body_hash: Digest, - dependencies: Vec, - chain_name: String, - ) -> Self { - DeployHeader { - account, - timestamp, - ttl, - gas_price, - body_hash, - dependencies, - chain_name, - } - } - - /// Returns the public key of the account providing the context in which to run the `Deploy`. - pub fn account(&self) -> &PublicKey { - &self.account - } - - /// Returns the creation timestamp of the `Deploy`. - pub fn timestamp(&self) -> Timestamp { - self.timestamp - } - - /// Returns the duration after the creation timestamp for which the `Deploy` will stay valid. - /// - /// After this duration has ended, the `Deploy` will be considered expired. - pub fn ttl(&self) -> TimeDiff { - self.ttl - } - - /// Returns `true` if the `Deploy` has expired. - pub fn expired(&self, current_instant: Timestamp) -> bool { - self.expires() < current_instant - } - - /// Returns the price per gas unit for the `Deploy`. - pub fn gas_price(&self) -> u64 { - self.gas_price - } - - /// Returns the hash of the body (i.e. the Wasm code) of the `Deploy`. - pub fn body_hash(&self) -> &Digest { - &self.body_hash - } - - /// Returns the list of other `Deploy`s that have to be executed before this one. - pub fn dependencies(&self) -> &Vec { - &self.dependencies - } - - /// Returns the name of the chain the `Deploy` should be executed on. - pub fn chain_name(&self) -> &str { - &self.chain_name - } - - /// Returns `Ok` if and only if the dependencies count and TTL are within limits, and the - /// timestamp is not later than `at + timestamp_leeway`. Does NOT check for expiry. 
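Two timing rules are captured by the expired helper above and the is_valid checks just below: a deploy is expired once timestamp + ttl (saturating) lies strictly before the current instant, and it is rejected as having a future timestamp only when the timestamp exceeds the validation time plus the configured leeway. A small self-contained illustration of both rules in plain millisecond arithmetic (illustrative names, not the crate's Timestamp/TimeDiff types):

/// Expired when `timestamp + ttl` (saturating) lies strictly before `now`.
fn expired(timestamp_ms: u64, ttl_ms: u64, now_ms: u64) -> bool {
    timestamp_ms.saturating_add(ttl_ms) < now_ms
}

/// Rejected as "timestamp in future" only when the timestamp exceeds the
/// validation time plus the configured leeway.
fn timestamp_in_future(timestamp_ms: u64, validation_ms: u64, leeway_ms: u64) -> bool {
    timestamp_ms > validation_ms.saturating_add(leeway_ms)
}

fn main() {
    // 30-minute TTL checked 31 minutes after creation: expired.
    assert!(expired(0, 30 * 60 * 1_000, 31 * 60 * 1_000));
    // Timestamp 1s ahead of the node clock with a 2s leeway: still acceptable,
    // matching the `acceptable_if_timestamp_slightly_in_future` test earlier.
    assert!(!timestamp_in_future(1_000, 0, 2_000));
    println!("timing rules hold");
}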
- #[cfg(any(feature = "std", test))] - pub fn is_valid( - &self, - config: &TransactionConfig, - timestamp_leeway: TimeDiff, - at: Timestamp, - deploy_hash: &DeployHash, - ) -> Result<(), DeployConfigFailure> { - if self.dependencies.len() > config.deploy_config.max_dependencies as usize { - debug!( - %deploy_hash, - deploy_header = %self, - max_dependencies = %config.deploy_config.max_dependencies, - "deploy dependency ceiling exceeded" - ); - return Err(DeployConfigFailure::ExcessiveDependencies { - max_dependencies: config.deploy_config.max_dependencies, - got: self.dependencies().len(), - }); - } - - if self.ttl() > config.max_ttl { - debug!( - %deploy_hash, - deploy_header = %self, - max_ttl = %config.max_ttl, - "deploy ttl excessive" - ); - return Err(DeployConfigFailure::ExcessiveTimeToLive { - max_ttl: config.max_ttl, - got: self.ttl(), - }); - } - - if self.timestamp() > at + timestamp_leeway { - debug!(%deploy_hash, deploy_header = %self, %at, "deploy timestamp in the future"); - return Err(DeployConfigFailure::TimestampInFuture { - validation_timestamp: at, - timestamp_leeway, - got: self.timestamp(), - }); - } - - Ok(()) - } - - /// Returns the timestamp of when the `Deploy` expires, i.e. `self.timestamp + self.ttl`. - pub fn expires(&self) -> Timestamp { - self.timestamp.saturating_add(self.ttl) - } - - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub(super) fn invalidate(&mut self) { - self.chain_name.clear(); - } -} - -impl ToBytes for DeployHeader { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.account.write_bytes(writer)?; - self.timestamp.write_bytes(writer)?; - self.ttl.write_bytes(writer)?; - self.gas_price.write_bytes(writer)?; - self.body_hash.write_bytes(writer)?; - self.dependencies.write_bytes(writer)?; - self.chain_name.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.account.serialized_length() - + self.timestamp.serialized_length() - + self.ttl.serialized_length() - + self.gas_price.serialized_length() - + self.body_hash.serialized_length() - + self.dependencies.serialized_length() - + self.chain_name.serialized_length() - } -} - -impl FromBytes for DeployHeader { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (account, remainder) = PublicKey::from_bytes(bytes)?; - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - let (ttl, remainder) = TimeDiff::from_bytes(remainder)?; - let (gas_price, remainder) = u64::from_bytes(remainder)?; - let (body_hash, remainder) = Digest::from_bytes(remainder)?; - let (dependencies, remainder) = Vec::::from_bytes(remainder)?; - let (chain_name, remainder) = String::from_bytes(remainder)?; - let deploy_header = DeployHeader { - account, - timestamp, - ttl, - gas_price, - body_hash, - dependencies, - chain_name, - }; - Ok((deploy_header, remainder)) - } -} - -impl Display for DeployHeader { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "deploy-header[account: {}, timestamp: {}, ttl: {}, gas_price: {}, body_hash: {}, \ - dependencies: [{}], chain_name: {}]", - self.account, - self.timestamp, - self.ttl, - self.gas_price, - self.body_hash, - DisplayIter::new(self.dependencies.iter()), - self.chain_name, - ) - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/deploy_id.rs 
b/casper_types_ver_2_0/src/transaction/deploy/deploy_id.rs deleted file mode 100644 index 82bf91a2..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/deploy_id.rs +++ /dev/null @@ -1,116 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Deploy; -use super::{DeployApprovalsHash, DeployHash}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - TransactionId, -}; - -/// The unique identifier of a [`Deploy`], comprising its [`DeployHash`] and -/// [`DeployApprovalsHash`]. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct DeployId { - deploy_hash: DeployHash, - approvals_hash: DeployApprovalsHash, -} - -impl DeployId { - /// Returns a new `DeployId`. - pub fn new(deploy_hash: DeployHash, approvals_hash: DeployApprovalsHash) -> Self { - DeployId { - deploy_hash, - approvals_hash, - } - } - - /// Returns the deploy hash. - pub fn deploy_hash(&self) -> &DeployHash { - &self.deploy_hash - } - - /// Returns the approvals hash. - pub fn approvals_hash(&self) -> &DeployApprovalsHash { - &self.approvals_hash - } - - /// Consumes `self`, returning a tuple of the constituent parts. - pub fn destructure(self) -> (DeployHash, DeployApprovalsHash) { - (self.deploy_hash, self.approvals_hash) - } - - /// Returns a random `DeployId`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - DeployId::new(DeployHash::random(rng), DeployApprovalsHash::random(rng)) - } -} - -impl Display for DeployId { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "deploy-id({}, {})", - self.deploy_hash, self.approvals_hash - ) - } -} - -impl ToBytes for DeployId { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deploy_hash.write_bytes(writer)?; - self.approvals_hash.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.deploy_hash.serialized_length() + self.approvals_hash.serialized_length() - } -} - -impl FromBytes for DeployId { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (deploy_hash, remainder) = DeployHash::from_bytes(bytes)?; - let (approvals_hash, remainder) = DeployApprovalsHash::from_bytes(remainder)?; - let id = DeployId::new(deploy_hash, approvals_hash); - Ok((id, remainder)) - } -} - -impl From for TransactionId { - fn from(id: DeployId) -> Self { - Self::Deploy { - deploy_hash: id.deploy_hash, - approvals_hash: id.approvals_hash, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let id = DeployId::random(rng); - bytesrepr::test_serialization_roundtrip(&id); - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/error.rs b/casper_types_ver_2_0/src/transaction/deploy/error.rs deleted file mode 100644 index c3388cdb..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/error.rs +++ /dev/null @@ -1,400 +0,0 @@ -use alloc::{boxed::Box, string::String}; -use core::{ - array::TryFromSliceError, - fmt::{self, Display, Formatter}, -}; 
-#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::Serialize; - -use crate::{crypto, TimeDiff, Timestamp, U512}; - -/// A representation of the way in which a deploy failed validation checks. -#[derive(Clone, Eq, PartialEq, Debug)] -#[cfg_attr(feature = "std", derive(Serialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum DeployConfigFailure { - /// Invalid chain name. - InvalidChainName { - /// The expected chain name. - expected: String, - /// The received chain name. - got: String, - }, - - /// Too many dependencies. - ExcessiveDependencies { - /// The dependencies limit. - max_dependencies: u8, - /// The actual number of dependencies provided. - got: usize, - }, - - /// Deploy is too large. - ExcessiveSize(ExcessiveSizeError), - - /// Excessive time-to-live. - ExcessiveTimeToLive { - /// The time-to-live limit. - max_ttl: TimeDiff, - /// The received time-to-live. - got: TimeDiff, - }, - - /// Deploy's timestamp is in the future. - TimestampInFuture { - /// The node's timestamp when validating the deploy. - validation_timestamp: Timestamp, - /// Any configured leeway added to `validation_timestamp`. - timestamp_leeway: TimeDiff, - /// The deploy's timestamp. - got: Timestamp, - }, - - /// The provided body hash does not match the actual hash of the body. - InvalidBodyHash, - - /// The provided deploy hash does not match the actual hash of the deploy. - InvalidDeployHash, - - /// The deploy has no approvals. - EmptyApprovals, - - /// Invalid approval. - InvalidApproval { - /// The index of the approval at fault. - index: usize, - /// The approval verification error. - error: crypto::Error, - }, - - /// Excessive length of deploy's session args. - ExcessiveSessionArgsLength { - /// The byte size limit of session arguments. - max_length: usize, - /// The received length of session arguments. - got: usize, - }, - - /// Excessive length of deploy's payment args. - ExcessivePaymentArgsLength { - /// The byte size limit of payment arguments. - max_length: usize, - /// The received length of payment arguments. - got: usize, - }, - - /// Missing payment "amount" runtime argument. - MissingPaymentAmount, - - /// Failed to parse payment "amount" runtime argument. - FailedToParsePaymentAmount, - - /// The payment amount associated with the deploy exceeds the block gas limit. - ExceededBlockGasLimit { - /// Configured block gas limit. - block_gas_limit: u64, - /// The payment amount received. - got: Box, - }, - - /// Missing payment "amount" runtime argument - MissingTransferAmount, - - /// Failed to parse transfer "amount" runtime argument. - FailedToParseTransferAmount, - - /// Insufficient transfer amount. - InsufficientTransferAmount { - /// The minimum transfer amount. - minimum: Box, - /// The attempted transfer amount. - attempted: Box, - }, - - /// The amount of approvals on the deploy exceeds the max_associated_keys limit. - ExcessiveApprovals { - /// Number of approvals on the deploy. - got: u32, - /// The chainspec limit for max_associated_keys. 
- max_associated_keys: u32, - }, -} - -impl Display for DeployConfigFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - DeployConfigFailure::InvalidChainName { expected, got } => { - write!( - formatter, - "invalid chain name: expected {}, got {}", - expected, got - ) - } - DeployConfigFailure::ExcessiveDependencies { - max_dependencies, - got, - } => { - write!( - formatter, - "{} dependencies exceeds limit of {}", - got, max_dependencies - ) - } - DeployConfigFailure::ExcessiveSize(error) => { - write!(formatter, "deploy size too large: {}", error) - } - DeployConfigFailure::ExcessiveTimeToLive { max_ttl, got } => { - write!( - formatter, - "time-to-live of {} exceeds limit of {}", - got, max_ttl - ) - } - DeployConfigFailure::TimestampInFuture { - validation_timestamp, - timestamp_leeway, - got, - } => { - write!( - formatter, - "timestamp of {} is later than node's timestamp of {} plus leeway of {}", - got, validation_timestamp, timestamp_leeway - ) - } - DeployConfigFailure::InvalidBodyHash => { - write!( - formatter, - "the provided body hash does not match the actual hash of the body" - ) - } - DeployConfigFailure::InvalidDeployHash => { - write!( - formatter, - "the provided hash does not match the actual hash of the deploy" - ) - } - DeployConfigFailure::EmptyApprovals => { - write!(formatter, "the deploy has no approvals") - } - DeployConfigFailure::InvalidApproval { index, error } => { - write!( - formatter, - "the approval at index {} is invalid: {}", - index, error - ) - } - DeployConfigFailure::ExcessiveSessionArgsLength { max_length, got } => { - write!( - formatter, - "serialized session code runtime args of {} exceeds limit of {}", - got, max_length - ) - } - DeployConfigFailure::ExcessivePaymentArgsLength { max_length, got } => { - write!( - formatter, - "serialized payment code runtime args of {} exceeds limit of {}", - got, max_length - ) - } - DeployConfigFailure::MissingPaymentAmount => { - write!(formatter, "missing payment 'amount' runtime argument") - } - DeployConfigFailure::FailedToParsePaymentAmount => { - write!(formatter, "failed to parse payment 'amount' as U512") - } - DeployConfigFailure::ExceededBlockGasLimit { - block_gas_limit, - got, - } => { - write!( - formatter, - "payment amount of {} exceeds the block gas limit of {}", - got, block_gas_limit - ) - } - DeployConfigFailure::MissingTransferAmount => { - write!(formatter, "missing transfer 'amount' runtime argument") - } - DeployConfigFailure::FailedToParseTransferAmount => { - write!(formatter, "failed to parse transfer 'amount' as U512") - } - DeployConfigFailure::InsufficientTransferAmount { minimum, attempted } => { - write!( - formatter, - "insufficient transfer amount; minimum: {} attempted: {}", - minimum, attempted - ) - } - DeployConfigFailure::ExcessiveApprovals { - got, - max_associated_keys, - } => { - write!( - formatter, - "number of approvals {} exceeds the maximum number of associated keys {}", - got, max_associated_keys - ) - } - } - } -} - -impl From for DeployConfigFailure { - fn from(error: ExcessiveSizeError) -> Self { - DeployConfigFailure::ExcessiveSize(error) - } -} - -#[cfg(feature = "std")] -impl StdError for DeployConfigFailure { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - DeployConfigFailure::InvalidApproval { error, .. } => Some(error), - DeployConfigFailure::InvalidChainName { .. } - | DeployConfigFailure::ExcessiveDependencies { .. 
} - | DeployConfigFailure::ExcessiveSize(_) - | DeployConfigFailure::ExcessiveTimeToLive { .. } - | DeployConfigFailure::TimestampInFuture { .. } - | DeployConfigFailure::InvalidBodyHash - | DeployConfigFailure::InvalidDeployHash - | DeployConfigFailure::EmptyApprovals - | DeployConfigFailure::ExcessiveSessionArgsLength { .. } - | DeployConfigFailure::ExcessivePaymentArgsLength { .. } - | DeployConfigFailure::MissingPaymentAmount - | DeployConfigFailure::FailedToParsePaymentAmount - | DeployConfigFailure::ExceededBlockGasLimit { .. } - | DeployConfigFailure::MissingTransferAmount - | DeployConfigFailure::FailedToParseTransferAmount - | DeployConfigFailure::InsufficientTransferAmount { .. } - | DeployConfigFailure::ExcessiveApprovals { .. } => None, - } - } -} - -/// Error returned when a Deploy is too large. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ExcessiveSizeError { - /// The maximum permitted serialized deploy size, in bytes. - pub max_transaction_size: u32, - /// The serialized size of the deploy provided, in bytes. - pub actual_deploy_size: usize, -} - -impl Display for ExcessiveSizeError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "deploy size of {} bytes exceeds limit of {}", - self.actual_deploy_size, self.max_transaction_size - ) - } -} - -#[cfg(feature = "std")] -impl StdError for ExcessiveSizeError {} - -/// Errors other than validation failures relating to `Deploy`s. -#[derive(Debug)] -#[non_exhaustive] -pub enum Error { - /// Error while encoding to JSON. - EncodeToJson(serde_json::Error), - - /// Error while decoding from JSON. - DecodeFromJson(DecodeFromJsonError), - - /// Failed to get "amount" from `payment()`'s runtime args. - InvalidPayment, -} - -impl From for Error { - fn from(error: serde_json::Error) -> Self { - Error::EncodeToJson(error) - } -} - -impl From for Error { - fn from(error: DecodeFromJsonError) -> Self { - Error::DecodeFromJson(error) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - Error::EncodeToJson(error) => { - write!(formatter, "encoding to json: {}", error) - } - Error::DecodeFromJson(error) => { - write!(formatter, "decoding from json: {}", error) - } - Error::InvalidPayment => { - write!(formatter, "invalid payment: missing 'amount' arg") - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for Error { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - Error::EncodeToJson(error) => Some(error), - Error::DecodeFromJson(error) => Some(error), - Error::InvalidPayment => None, - } - } -} - -/// Error while decoding a `Deploy` from JSON. -#[derive(Debug)] -#[non_exhaustive] -pub enum DecodeFromJsonError { - /// Failed to decode from base 16. - FromHex(base16::DecodeError), - - /// Failed to convert slice to array. 
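// Illustrative sketch, not part of the patch (import paths are assumed; the
// types are taken from the removed error module above): an ExcessiveSizeError
// can be promoted into the broader DeployConfigFailure via its From impl, and
// Display renders the human-readable reason.
use casper_types_ver_2_0::{DeployConfigFailure, ExcessiveSizeError};

fn report(failure: &DeployConfigFailure) -> String {
    // e.g. "deploy rejected: deploy size of 1048577 bytes exceeds limit of 1048576"
    format!("deploy rejected: {}", failure)
}

fn main() {
    let failure: DeployConfigFailure = ExcessiveSizeError {
        max_transaction_size: 1_048_576,
        actual_deploy_size: 1_048_577,
    }
    .into();
    println!("{}", report(&failure));
}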
- TryFromSlice(TryFromSliceError), -} - -impl From for DecodeFromJsonError { - fn from(error: base16::DecodeError) -> Self { - DecodeFromJsonError::FromHex(error) - } -} - -impl From for DecodeFromJsonError { - fn from(error: TryFromSliceError) -> Self { - DecodeFromJsonError::TryFromSlice(error) - } -} - -impl Display for DecodeFromJsonError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - DecodeFromJsonError::FromHex(error) => { - write!(formatter, "{}", error) - } - DecodeFromJsonError::TryFromSlice(error) => { - write!(formatter, "{}", error) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for DecodeFromJsonError { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - DecodeFromJsonError::FromHex(error) => Some(error), - DecodeFromJsonError::TryFromSlice(error) => Some(error), - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/executable_deploy_item.rs b/casper_types_ver_2_0/src/transaction/deploy/executable_deploy_item.rs deleted file mode 100644 index e553a87c..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/executable_deploy_item.rs +++ /dev/null @@ -1,827 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use hex_fmt::HexFmt; -#[cfg(any(feature = "testing", test))] -use rand::{ - distributions::{Alphanumeric, Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Deploy; -use crate::{ - account::AccountHash, - addressable_entity::DEFAULT_ENTRY_POINT_NAME, - bytesrepr::{self, Bytes, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - package::{EntityVersion, PackageHash}, - runtime_args, serde_helpers, - system::mint::ARG_AMOUNT, - AddressableEntityHash, AddressableEntityIdentifier, Gas, Motes, PackageIdentifier, Phase, - PublicKey, RuntimeArgs, URef, U512, -}; -#[cfg(any(feature = "testing", test))] -use crate::{testing::TestRng, CLValue}; - -const TAG_LENGTH: usize = U8_SERIALIZED_LENGTH; -const MODULE_BYTES_TAG: u8 = 0; -const STORED_CONTRACT_BY_HASH_TAG: u8 = 1; -const STORED_CONTRACT_BY_NAME_TAG: u8 = 2; -const STORED_VERSIONED_CONTRACT_BY_HASH_TAG: u8 = 3; -const STORED_VERSIONED_CONTRACT_BY_NAME_TAG: u8 = 4; -const TRANSFER_TAG: u8 = 5; -const TRANSFER_ARG_AMOUNT: &str = "amount"; -const TRANSFER_ARG_SOURCE: &str = "source"; -const TRANSFER_ARG_TARGET: &str = "target"; -const TRANSFER_ARG_ID: &str = "id"; - -/// Identifier for an [`ExecutableDeployItem`]. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -pub enum ExecutableDeployItemIdentifier { - /// The deploy item is of the type [`ExecutableDeployItem::ModuleBytes`] - Module, - /// The deploy item is a variation of a stored contract. - AddressableEntity(AddressableEntityIdentifier), - /// The deploy item is a variation of a stored contract package. - Package(PackageIdentifier), - /// The deploy item is a native transfer. - Transfer, -} - -/// The executable component of a [`Deploy`]. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum ExecutableDeployItem { - /// Executable specified as raw bytes that represent Wasm code and an instance of - /// [`RuntimeArgs`]. - ModuleBytes { - /// Raw Wasm module bytes with 'call' exported as an entrypoint. 
- #[cfg_attr( - feature = "json-schema", - schemars(description = "Hex-encoded raw Wasm bytes.") - )] - module_bytes: Bytes, - /// Runtime arguments. - args: RuntimeArgs, - }, - /// Stored contract referenced by its [`AddressableEntityHash`], entry point and an instance of - /// [`RuntimeArgs`]. - StoredContractByHash { - /// Contract hash. - #[serde(with = "serde_helpers::contract_hash_as_digest")] - #[cfg_attr( - feature = "json-schema", - schemars( - // this attribute is necessary due to a bug: https://github.com/GREsau/schemars/issues/89 - with = "AddressableEntityHash", - description = "Hex-encoded contract hash." - ) - )] - hash: AddressableEntityHash, - /// Name of an entry point. - entry_point: String, - /// Runtime arguments. - args: RuntimeArgs, - }, - /// Stored contract referenced by a named key existing in the signer's account context, entry - /// point and an instance of [`RuntimeArgs`]. - StoredContractByName { - /// Named key. - name: String, - /// Name of an entry point. - entry_point: String, - /// Runtime arguments. - args: RuntimeArgs, - }, - /// Stored versioned contract referenced by its [`PackageHash`], entry point and an - /// instance of [`RuntimeArgs`]. - StoredVersionedContractByHash { - /// Contract package hash - #[serde(with = "serde_helpers::contract_package_hash_as_digest")] - #[cfg_attr( - feature = "json-schema", - schemars( - // this attribute is necessary due to a bug: https://github.com/GREsau/schemars/issues/89 - with = "PackageHash", - description = "Hex-encoded contract package hash." - ) - )] - hash: PackageHash, - /// An optional version of the contract to call. It will default to the highest enabled - /// version if no value is specified. - version: Option, - /// Entry point name. - entry_point: String, - /// Runtime arguments. - args: RuntimeArgs, - }, - /// Stored versioned contract referenced by a named key existing in the signer's account - /// context, entry point and an instance of [`RuntimeArgs`]. - StoredVersionedContractByName { - /// Named key. - name: String, - /// An optional version of the contract to call. It will default to the highest enabled - /// version if no value is specified. - version: Option, - /// Entry point name. - entry_point: String, - /// Runtime arguments. - args: RuntimeArgs, - }, - /// A native transfer which does not contain or reference a Wasm code. - Transfer { - /// Runtime arguments. - args: RuntimeArgs, - }, -} - -impl ExecutableDeployItem { - /// Returns a new `ExecutableDeployItem::ModuleBytes`. - pub fn new_module_bytes(module_bytes: Bytes, args: RuntimeArgs) -> Self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } - } - - /// Returns a new `ExecutableDeployItem::ModuleBytes` suitable for use as standard payment code - /// of a `Deploy`. - pub fn new_standard_payment>(amount: A) -> Self { - ExecutableDeployItem::ModuleBytes { - module_bytes: Bytes::new(), - args: runtime_args! { - ARG_AMOUNT => amount.into(), - }, - } - } - - /// Returns a new `ExecutableDeployItem::StoredContractByHash`. - pub fn new_stored_contract_by_hash( - hash: AddressableEntityHash, - entry_point: String, - args: RuntimeArgs, - ) -> Self { - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } - } - - /// Returns a new `ExecutableDeployItem::StoredContractByName`. 
- pub fn new_stored_contract_by_name( - name: String, - entry_point: String, - args: RuntimeArgs, - ) -> Self { - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } - } - - /// Returns a new `ExecutableDeployItem::StoredVersionedContractByHash`. - pub fn new_stored_versioned_contract_by_hash( - hash: PackageHash, - version: Option, - entry_point: String, - args: RuntimeArgs, - ) -> Self { - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } - } - - /// Returns a new `ExecutableDeployItem::StoredVersionedContractByName`. - pub fn new_stored_versioned_contract_by_name( - name: String, - version: Option, - entry_point: String, - args: RuntimeArgs, - ) -> Self { - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } - } - - /// Returns a new `ExecutableDeployItem` suitable for use as session code for a transfer. - /// - /// If `maybe_source` is None, the account's main purse is used as the source. - pub fn new_transfer>( - amount: A, - maybe_source: Option, - target: TransferTarget, - maybe_transfer_id: Option, - ) -> Self { - let mut args = RuntimeArgs::new(); - args.insert(TRANSFER_ARG_AMOUNT, amount.into()) - .expect("should serialize amount arg"); - - if let Some(source) = maybe_source { - args.insert(TRANSFER_ARG_SOURCE, source) - .expect("should serialize source arg"); - } - - match target { - TransferTarget::PublicKey(public_key) => args - .insert(TRANSFER_ARG_TARGET, public_key) - .expect("should serialize public key target arg"), - TransferTarget::AccountHash(account_hash) => args - .insert(TRANSFER_ARG_TARGET, account_hash) - .expect("should serialize account hash target arg"), - TransferTarget::URef(uref) => args - .insert(TRANSFER_ARG_TARGET, uref) - .expect("should serialize uref target arg"), - } - - args.insert(TRANSFER_ARG_ID, maybe_transfer_id) - .expect("should serialize transfer id arg"); - - ExecutableDeployItem::Transfer { args } - } - - /// Returns the entry point name. - pub fn entry_point_name(&self) -> &str { - match self { - ExecutableDeployItem::ModuleBytes { .. } | ExecutableDeployItem::Transfer { .. } => { - DEFAULT_ENTRY_POINT_NAME - } - ExecutableDeployItem::StoredVersionedContractByName { entry_point, .. } - | ExecutableDeployItem::StoredVersionedContractByHash { entry_point, .. } - | ExecutableDeployItem::StoredContractByHash { entry_point, .. } - | ExecutableDeployItem::StoredContractByName { entry_point, .. } => entry_point, - } - } - - /// Returns the identifier of the `ExecutableDeployItem`. - pub fn identifier(&self) -> ExecutableDeployItemIdentifier { - match self { - ExecutableDeployItem::ModuleBytes { .. } => ExecutableDeployItemIdentifier::Module, - ExecutableDeployItem::StoredContractByHash { hash, .. } => { - ExecutableDeployItemIdentifier::AddressableEntity( - AddressableEntityIdentifier::Hash(*hash), - ) - } - ExecutableDeployItem::StoredContractByName { name, .. } => { - ExecutableDeployItemIdentifier::AddressableEntity( - AddressableEntityIdentifier::Name(name.clone()), - ) - } - ExecutableDeployItem::StoredVersionedContractByHash { hash, version, .. } => { - ExecutableDeployItemIdentifier::Package(PackageIdentifier::Hash { - package_hash: *hash, - version: *version, - }) - } - ExecutableDeployItem::StoredVersionedContractByName { name, version, .. } => { - ExecutableDeployItemIdentifier::Package(PackageIdentifier::Name { - name: name.clone(), - version: *version, - }) - } - ExecutableDeployItem::Transfer { .. 
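// Illustrative sketch, not part of the patch (import paths assumed): building
// the session and payment halves of a legacy deploy with the constructors
// shown above.
use casper_types_ver_2_0::{runtime_args, ExecutableDeployItem, U512};

fn session_and_payment() -> (ExecutableDeployItem, ExecutableDeployItem) {
    // Session: call a stored contract through a named key in the caller's account.
    let session = ExecutableDeployItem::new_stored_contract_by_name(
        "counter".to_string(),
        "increment".to_string(),
        runtime_args! { "step" => 1u64 },
    );
    // Payment: empty module bytes plus an "amount" arg is the standard payment.
    let payment = ExecutableDeployItem::new_standard_payment(U512::from(2_500_000_000u64));
    (session, payment)
}

fn main() {
    let (session, payment) = session_and_payment();
    assert!(payment.is_module_bytes());
    println!("session entry point: {}", session.entry_point_name());
}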
} => ExecutableDeployItemIdentifier::Transfer, - } - } - - /// Returns the identifier of the contract in the deploy item, if present. - pub fn contract_identifier(&self) -> Option { - match self { - ExecutableDeployItem::ModuleBytes { .. } - | ExecutableDeployItem::StoredVersionedContractByHash { .. } - | ExecutableDeployItem::StoredVersionedContractByName { .. } - | ExecutableDeployItem::Transfer { .. } => None, - ExecutableDeployItem::StoredContractByHash { hash, .. } => { - Some(AddressableEntityIdentifier::Hash(*hash)) - } - ExecutableDeployItem::StoredContractByName { name, .. } => { - Some(AddressableEntityIdentifier::Name(name.clone())) - } - } - } - - /// Returns the identifier of the contract package in the deploy item, if present. - pub fn contract_package_identifier(&self) -> Option { - match self { - ExecutableDeployItem::ModuleBytes { .. } - | ExecutableDeployItem::StoredContractByHash { .. } - | ExecutableDeployItem::StoredContractByName { .. } - | ExecutableDeployItem::Transfer { .. } => None, - - ExecutableDeployItem::StoredVersionedContractByHash { hash, version, .. } => { - Some(PackageIdentifier::Hash { - package_hash: *hash, - version: *version, - }) - } - ExecutableDeployItem::StoredVersionedContractByName { name, version, .. } => { - Some(PackageIdentifier::Name { - name: name.clone(), - version: *version, - }) - } - } - } - - /// Returns the runtime arguments. - pub fn args(&self) -> &RuntimeArgs { - match self { - ExecutableDeployItem::ModuleBytes { args, .. } - | ExecutableDeployItem::StoredContractByHash { args, .. } - | ExecutableDeployItem::StoredContractByName { args, .. } - | ExecutableDeployItem::StoredVersionedContractByHash { args, .. } - | ExecutableDeployItem::StoredVersionedContractByName { args, .. } - | ExecutableDeployItem::Transfer { args } => args, - } - } - - /// Returns the payment amount from args (if any) as Gas. - pub fn payment_amount(&self, conv_rate: u64) -> Option { - let cl_value = self.args().get(ARG_AMOUNT)?; - let motes = cl_value.clone().into_t::().ok()?; - Gas::from_motes(Motes::new(motes), conv_rate) - } - - /// Returns `true` if this deploy item is a native transfer. - pub fn is_transfer(&self) -> bool { - matches!(self, ExecutableDeployItem::Transfer { .. }) - } - - /// Returns `true` if this deploy item is a standard payment. - pub fn is_standard_payment(&self, phase: Phase) -> bool { - if phase != Phase::Payment { - return false; - } - - if let ExecutableDeployItem::ModuleBytes { module_bytes, .. } = self { - return module_bytes.is_empty(); - } - - false - } - - /// Returns `true` if the deploy item is a contract identified by its name. - pub fn is_by_name(&self) -> bool { - matches!( - self, - ExecutableDeployItem::StoredVersionedContractByName { .. } - ) || matches!(self, ExecutableDeployItem::StoredContractByName { .. }) - } - - /// Returns the name of the contract or contract package, if the deploy item is identified by - /// name. - pub fn by_name(&self) -> Option { - match self { - ExecutableDeployItem::StoredContractByName { name, .. } - | ExecutableDeployItem::StoredVersionedContractByName { name, .. } => { - Some(name.clone()) - } - ExecutableDeployItem::ModuleBytes { .. } - | ExecutableDeployItem::StoredContractByHash { .. } - | ExecutableDeployItem::StoredVersionedContractByHash { .. } - | ExecutableDeployItem::Transfer { .. } => None, - } - } - - /// Returns `true` if the deploy item is a stored contract. - pub fn is_stored_contract(&self) -> bool { - matches!(self, ExecutableDeployItem::StoredContractByHash { .. 
}) - || matches!(self, ExecutableDeployItem::StoredContractByName { .. }) - } - - /// Returns `true` if the deploy item is a stored contract package. - pub fn is_stored_contract_package(&self) -> bool { - matches!( - self, - ExecutableDeployItem::StoredVersionedContractByHash { .. } - ) || matches!( - self, - ExecutableDeployItem::StoredVersionedContractByName { .. } - ) - } - - /// Returns `true` if the deploy item is [`ModuleBytes`]. - /// - /// [`ModuleBytes`]: ExecutableDeployItem::ModuleBytes - pub fn is_module_bytes(&self) -> bool { - matches!(self, Self::ModuleBytes { .. }) - } - - /// Returns a random `ExecutableDeployItem`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - rng.gen() - } -} - -impl ToBytes for ExecutableDeployItem { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } => { - writer.push(MODULE_BYTES_TAG); - module_bytes.write_bytes(writer)?; - args.write_bytes(writer) - } - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } => { - writer.push(STORED_CONTRACT_BY_HASH_TAG); - hash.write_bytes(writer)?; - entry_point.write_bytes(writer)?; - args.write_bytes(writer) - } - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } => { - writer.push(STORED_CONTRACT_BY_NAME_TAG); - name.write_bytes(writer)?; - entry_point.write_bytes(writer)?; - args.write_bytes(writer) - } - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } => { - writer.push(STORED_VERSIONED_CONTRACT_BY_HASH_TAG); - hash.write_bytes(writer)?; - version.write_bytes(writer)?; - entry_point.write_bytes(writer)?; - args.write_bytes(writer) - } - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } => { - writer.push(STORED_VERSIONED_CONTRACT_BY_NAME_TAG); - name.write_bytes(writer)?; - version.write_bytes(writer)?; - entry_point.write_bytes(writer)?; - args.write_bytes(writer) - } - ExecutableDeployItem::Transfer { args } => { - writer.push(TRANSFER_TAG); - args.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH - + match self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } => { - module_bytes.serialized_length() + args.serialized_length() - } - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } => { - hash.serialized_length() - + entry_point.serialized_length() - + args.serialized_length() - } - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } => { - name.serialized_length() - + entry_point.serialized_length() - + args.serialized_length() - } - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } => { - hash.serialized_length() - + version.serialized_length() - + entry_point.serialized_length() - + args.serialized_length() - } - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } => { - name.serialized_length() - + version.serialized_length() - + entry_point.serialized_length() - + args.serialized_length() - } - ExecutableDeployItem::Transfer { args } => args.serialized_length(), - } - } -} - -impl FromBytes for ExecutableDeployItem { - fn from_bytes(bytes: &[u8]) -> 
Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - MODULE_BYTES_TAG => { - let (module_bytes, remainder) = Bytes::from_bytes(remainder)?; - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok(( - ExecutableDeployItem::ModuleBytes { module_bytes, args }, - remainder, - )) - } - STORED_CONTRACT_BY_HASH_TAG => { - let (hash, remainder) = AddressableEntityHash::from_bytes(remainder)?; - let (entry_point, remainder) = String::from_bytes(remainder)?; - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok(( - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - }, - remainder, - )) - } - STORED_CONTRACT_BY_NAME_TAG => { - let (name, remainder) = String::from_bytes(remainder)?; - let (entry_point, remainder) = String::from_bytes(remainder)?; - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok(( - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - }, - remainder, - )) - } - STORED_VERSIONED_CONTRACT_BY_HASH_TAG => { - let (hash, remainder) = PackageHash::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let (entry_point, remainder) = String::from_bytes(remainder)?; - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok(( - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - }, - remainder, - )) - } - STORED_VERSIONED_CONTRACT_BY_NAME_TAG => { - let (name, remainder) = String::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let (entry_point, remainder) = String::from_bytes(remainder)?; - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok(( - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - }, - remainder, - )) - } - TRANSFER_TAG => { - let (args, remainder) = RuntimeArgs::from_bytes(remainder)?; - Ok((ExecutableDeployItem::Transfer { args }, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -impl Display for ExecutableDeployItem { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - ExecutableDeployItem::ModuleBytes { module_bytes, .. } => { - write!(f, "module-bytes [{} bytes]", module_bytes.len()) - } - ExecutableDeployItem::StoredContractByHash { - hash, entry_point, .. - } => write!( - f, - "stored-contract-by-hash: {:10}, entry-point: {}", - HexFmt(hash), - entry_point, - ), - ExecutableDeployItem::StoredContractByName { - name, entry_point, .. - } => write!( - f, - "stored-contract-by-name: {}, entry-point: {}", - name, entry_point, - ), - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version: Some(ver), - entry_point, - .. - } => write!( - f, - "stored-versioned-contract-by-hash: {:10}, version: {}, entry-point: {}", - HexFmt(hash), - ver, - entry_point, - ), - ExecutableDeployItem::StoredVersionedContractByHash { - hash, entry_point, .. - } => write!( - f, - "stored-versioned-contract-by-hash: {:10}, version: latest, entry-point: {}", - HexFmt(hash), - entry_point, - ), - ExecutableDeployItem::StoredVersionedContractByName { - name, - version: Some(ver), - entry_point, - .. - } => write!( - f, - "stored-versioned-contract: {}, version: {}, entry-point: {}", - name, ver, entry_point, - ), - ExecutableDeployItem::StoredVersionedContractByName { - name, entry_point, .. 
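// Standalone sketch of the tag-prefixed encoding pattern used by the ToBytes /
// FromBytes impls above: a single leading byte selects the variant, followed by
// that variant's fields. Simplified (no bytesrepr); names are illustrative.
#[derive(Debug, PartialEq)]
enum Item {
    Transfer { amount: u64 },
    ByName { name: String },
}

const TRANSFER_TAG: u8 = 0;
const BY_NAME_TAG: u8 = 1;

fn encode(item: &Item) -> Vec<u8> {
    let mut out = Vec::new();
    match item {
        Item::Transfer { amount } => {
            out.push(TRANSFER_TAG);
            out.extend_from_slice(&amount.to_le_bytes());
        }
        Item::ByName { name } => {
            out.push(BY_NAME_TAG);
            out.extend_from_slice(&(name.len() as u32).to_le_bytes());
            out.extend_from_slice(name.as_bytes());
        }
    }
    out
}

fn decode(bytes: &[u8]) -> Option<Item> {
    let (tag, rest) = bytes.split_first()?;
    match *tag {
        TRANSFER_TAG => Some(Item::Transfer {
            amount: u64::from_le_bytes(rest.get(..8)?.try_into().ok()?),
        }),
        BY_NAME_TAG => {
            let len = u32::from_le_bytes(rest.get(..4)?.try_into().ok()?) as usize;
            let name = String::from_utf8(rest.get(4..4 + len)?.to_vec()).ok()?;
            Some(Item::ByName { name })
        }
        // An unknown tag maps to a formatting error in the real impls.
        _ => None,
    }
}

fn main() {
    let item = Item::ByName { name: "counter".to_string() };
    assert_eq!(decode(&encode(&item)), Some(item));
}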
- } => write!( - f, - "stored-versioned-contract: {}, version: latest, entry-point: {}", - name, entry_point, - ), - ExecutableDeployItem::Transfer { .. } => write!(f, "transfer"), - } - } -} - -impl Debug for ExecutableDeployItem { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } => f - .debug_struct("ModuleBytes") - .field("module_bytes", &format!("[{} bytes]", module_bytes.len())) - .field("args", args) - .finish(), - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } => f - .debug_struct("StoredContractByHash") - .field("hash", &base16::encode_lower(hash)) - .field("entry_point", &entry_point) - .field("args", args) - .finish(), - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } => f - .debug_struct("StoredContractByName") - .field("name", &name) - .field("entry_point", &entry_point) - .field("args", args) - .finish(), - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } => f - .debug_struct("StoredVersionedContractByHash") - .field("hash", &base16::encode_lower(hash)) - .field("version", version) - .field("entry_point", &entry_point) - .field("args", args) - .finish(), - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } => f - .debug_struct("StoredVersionedContractByName") - .field("name", &name) - .field("version", version) - .field("entry_point", &entry_point) - .field("args", args) - .finish(), - ExecutableDeployItem::Transfer { args } => { - f.debug_struct("Transfer").field("args", args).finish() - } - } - } -} - -#[cfg(any(feature = "testing", test))] -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> ExecutableDeployItem { - fn random_bytes(rng: &mut R) -> Vec { - let mut bytes = vec![0u8; rng.gen_range(0..100)]; - rng.fill_bytes(bytes.as_mut()); - bytes - } - - fn random_string(rng: &mut R) -> String { - rng.sample_iter(&Alphanumeric) - .take(20) - .map(char::from) - .collect() - } - - let mut args = RuntimeArgs::new(); - let _ = args.insert(random_string(rng), Bytes::from(random_bytes(rng))); - - match rng.gen_range(0..5) { - 0 => ExecutableDeployItem::ModuleBytes { - module_bytes: random_bytes(rng).into(), - args, - }, - 1 => ExecutableDeployItem::StoredContractByHash { - hash: AddressableEntityHash::new(rng.gen()), - entry_point: random_string(rng), - args, - }, - 2 => ExecutableDeployItem::StoredContractByName { - name: random_string(rng), - entry_point: random_string(rng), - args, - }, - 3 => ExecutableDeployItem::StoredVersionedContractByHash { - hash: PackageHash::new(rng.gen()), - version: rng.gen(), - entry_point: random_string(rng), - args, - }, - 4 => ExecutableDeployItem::StoredVersionedContractByName { - name: random_string(rng), - version: rng.gen(), - entry_point: random_string(rng), - args, - }, - 5 => { - let amount = rng.gen_range(2_500_000_000_u64..1_000_000_000_000_000); - let mut transfer_args = RuntimeArgs::new(); - transfer_args.insert_cl_value( - ARG_AMOUNT, - CLValue::from_t(U512::from(amount)).expect("should get CLValue from U512"), - ); - ExecutableDeployItem::Transfer { - args: transfer_args, - } - } - _ => unreachable!(), - } - } -} - -/// The various types which can be used as the `target` runtime argument of a native transfer. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)] -pub enum TransferTarget { - /// A public key. - PublicKey(PublicKey), - /// An account hash. 
- AccountHash(AccountHash), - /// A URef. - URef(URef), -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn serialization_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - let executable_deploy_item = ExecutableDeployItem::random(rng); - bytesrepr::test_serialization_roundtrip(&executable_deploy_item); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/deploy/finalized_deploy_approvals.rs b/casper_types_ver_2_0/src/transaction/deploy/finalized_deploy_approvals.rs deleted file mode 100644 index 37fb66ad..00000000 --- a/casper_types_ver_2_0/src/transaction/deploy/finalized_deploy_approvals.rs +++ /dev/null @@ -1,76 +0,0 @@ -use alloc::{collections::BTreeSet, vec::Vec}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - DeployApproval, -}; - -/// A set of approvals that has been agreed upon by consensus to approve of a specific deploy. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct FinalizedDeployApprovals(BTreeSet); - -impl FinalizedDeployApprovals { - /// Creates a new set of finalized deploy approvals. - pub fn new(approvals: BTreeSet) -> Self { - Self(approvals) - } - - /// Returns the inner `BTreeSet` of approvals. - pub fn inner(&self) -> &BTreeSet { - &self.0 - } - - /// Converts this set of deploy approvals into the inner `BTreeSet`. - pub fn into_inner(self) -> BTreeSet { - self.0 - } - - /// Returns a random FinalizedDeployApprovals. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let count = rng.gen_range(1..10); - let approvals = (0..count).map(|_| DeployApproval::random(rng)).collect(); - FinalizedDeployApprovals(approvals) - } -} - -impl ToBytes for FinalizedDeployApprovals { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for FinalizedDeployApprovals { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (approvals, remainder) = BTreeSet::::from_bytes(bytes)?; - Ok((FinalizedDeployApprovals(approvals), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let approvals = FinalizedDeployApprovals::random(rng); - bytesrepr::test_serialization_roundtrip(&approvals); - } -} diff --git a/casper_types_ver_2_0/src/transaction/execution_info.rs b/casper_types_ver_2_0/src/transaction/execution_info.rs deleted file mode 100644 index 26303f5c..00000000 --- a/casper_types_ver_2_0/src/transaction/execution_info.rs +++ /dev/null @@ -1,62 +0,0 @@ -use alloc::vec::Vec; - -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - execution::ExecutionResult, - BlockHash, -}; - -/// The block hash and height in which a given deploy was executed, along with the execution result -/// if known. 
-#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct ExecutionInfo { - /// The hash of the block in which the deploy was executed. - pub block_hash: BlockHash, - /// The height of the block in which the deploy was executed. - pub block_height: u64, - /// The execution result if known. - pub execution_result: Option, -} - -impl FromBytes for ExecutionInfo { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (block_hash, bytes) = FromBytes::from_bytes(bytes)?; - let (block_height, bytes) = FromBytes::from_bytes(bytes)?; - let (execution_result, bytes) = FromBytes::from_bytes(bytes)?; - Ok(( - ExecutionInfo { - block_hash, - block_height, - execution_result, - }, - bytes, - )) - } -} - -impl ToBytes for ExecutionInfo { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut result)?; - Ok(result) - } - - fn write_bytes(&self, bytes: &mut Vec) -> Result<(), bytesrepr::Error> { - self.block_hash.write_bytes(bytes)?; - self.block_height.write_bytes(bytes)?; - self.execution_result.write_bytes(bytes)?; - Ok(()) - } - - fn serialized_length(&self) -> usize { - self.block_hash.serialized_length() - + self.block_height.serialized_length() - + self.execution_result.serialized_length() - } -} diff --git a/casper_types_ver_2_0/src/transaction/finalized_approvals.rs b/casper_types_ver_2_0/src/transaction/finalized_approvals.rs deleted file mode 100644 index 708873d2..00000000 --- a/casper_types_ver_2_0/src/transaction/finalized_approvals.rs +++ /dev/null @@ -1,128 +0,0 @@ -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use alloc::vec::Vec; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - Transaction, -}; - -use super::{deploy::FinalizedDeployApprovals, transaction_v1::FinalizedTransactionV1Approvals}; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -/// A set of approvals that has been agreed upon by consensus to approve of a specific transaction. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum FinalizedApprovals { - /// Approvals for a Deploy. - Deploy(FinalizedDeployApprovals), - /// Approvals for a TransactionV1. - V1(FinalizedTransactionV1Approvals), -} - -impl FinalizedApprovals { - /// Creates a new set of finalized approvals from a transaction. - pub fn new(transaction: &Transaction) -> Self { - match transaction { - Transaction::Deploy(deploy) => { - Self::Deploy(FinalizedDeployApprovals::new(deploy.approvals().clone())) - } - Transaction::V1(txn) => Self::V1(FinalizedTransactionV1Approvals::new( - txn.approvals().clone(), - )), - } - } - - /// Returns a random FinalizedApprovals. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen_bool(0.5) { - Self::Deploy(FinalizedDeployApprovals::random(rng)) - } else { - Self::V1(FinalizedTransactionV1Approvals::random(rng)) - } - } -} - -impl From for FinalizedApprovals { - fn from(approvals: FinalizedDeployApprovals) -> Self { - Self::Deploy(approvals) - } -} - -impl From for FinalizedApprovals { - fn from(approvals: FinalizedTransactionV1Approvals) -> Self { - Self::V1(approvals) - } -} - -impl ToBytes for FinalizedApprovals { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - FinalizedApprovals::Deploy(approvals) => { - DEPLOY_TAG.write_bytes(writer)?; - approvals.write_bytes(writer) - } - FinalizedApprovals::V1(approvals) => { - V1_TAG.write_bytes(writer)?; - approvals.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - FinalizedApprovals::Deploy(approvals) => approvals.serialized_length(), - FinalizedApprovals::V1(approvals) => approvals.serialized_length(), - } - } -} - -impl FromBytes for FinalizedApprovals { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (approvals, remainder) = FinalizedDeployApprovals::from_bytes(remainder)?; - Ok((FinalizedApprovals::Deploy(approvals), remainder)) - } - V1_TAG => { - let (approvals, remainder) = - FinalizedTransactionV1Approvals::from_bytes(remainder)?; - Ok((FinalizedApprovals::V1(approvals), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let approvals = FinalizedApprovals::from(FinalizedDeployApprovals::random(rng)); - bytesrepr::test_serialization_roundtrip(&approvals); - - let approvals = FinalizedApprovals::from(FinalizedTransactionV1Approvals::random(rng)); - bytesrepr::test_serialization_roundtrip(&approvals); - } -} diff --git a/casper_types_ver_2_0/src/transaction/initiator_addr.rs b/casper_types_ver_2_0/src/transaction/initiator_addr.rs deleted file mode 100644 index 0f09d6f9..00000000 --- a/casper_types_ver_2_0/src/transaction/initiator_addr.rs +++ /dev/null @@ -1,165 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use hex_fmt::HexFmt; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::TransactionV1; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - serde_helpers, EntityAddr, PublicKey, -}; - -const PUBLIC_KEY_TAG: u8 = 0; -const ACCOUNT_HASH_TAG: u8 = 1; -const ENTITY_ADDR_TAG: u8 = 2; - -/// The address of the initiator of a [`TransactionV1`]. 
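// Sketch, not part of the patch (crate-root re-exports are assumed): the
// removed FinalizedApprovals wrapper dispatches on the transaction kind.
use casper_types_ver_2_0::{FinalizedApprovals, Transaction};

fn deploy_approval_count(transaction: &Transaction) -> Option<usize> {
    match FinalizedApprovals::new(transaction) {
        FinalizedApprovals::Deploy(approvals) => Some(approvals.inner().len()),
        FinalizedApprovals::V1(_) => None,
    }
}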
-#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "The address of the initiator of a TransactionV1.") -)] -#[serde(deny_unknown_fields)] -pub enum InitiatorAddr { - /// The public key of the initiator. - PublicKey(PublicKey), - /// The account hash derived from the public key of the initiator. - AccountHash(AccountHash), - /// The entity address of the initiator. - #[serde(with = "serde_helpers::raw_32_byte_array")] - #[cfg_attr( - feature = "json-schema", - schemars( - with = "String", - description = "Hex-encoded entity address of the initiator." - ) - )] - EntityAddr(EntityAddr), -} - -impl InitiatorAddr { - /// Returns a random `InitiatorAddr`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - PUBLIC_KEY_TAG => InitiatorAddr::PublicKey(PublicKey::random(rng)), - ACCOUNT_HASH_TAG => InitiatorAddr::AccountHash(rng.gen()), - ENTITY_ADDR_TAG => InitiatorAddr::EntityAddr(rng.gen()), - _ => unreachable!(), - } - } -} - -impl Display for InitiatorAddr { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - InitiatorAddr::PublicKey(public_key) => write!(formatter, "{}", public_key), - InitiatorAddr::AccountHash(account_hash) => { - write!(formatter, "account-hash({})", account_hash) - } - InitiatorAddr::EntityAddr(entity_addr) => { - write!(formatter, "entity-addr({:10})", HexFmt(entity_addr)) - } - } - } -} - -impl Debug for InitiatorAddr { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - InitiatorAddr::PublicKey(public_key) => formatter - .debug_tuple("PublicKey") - .field(public_key) - .finish(), - InitiatorAddr::AccountHash(account_hash) => formatter - .debug_tuple("AccountHash") - .field(account_hash) - .finish(), - InitiatorAddr::EntityAddr(entity_addr) => formatter - .debug_tuple("EntityAddr") - .field(&HexFmt(entity_addr)) - .finish(), - } - } -} - -impl ToBytes for InitiatorAddr { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - InitiatorAddr::PublicKey(public_key) => { - PUBLIC_KEY_TAG.write_bytes(writer)?; - public_key.write_bytes(writer) - } - InitiatorAddr::AccountHash(account_hash) => { - ACCOUNT_HASH_TAG.write_bytes(writer)?; - account_hash.write_bytes(writer) - } - InitiatorAddr::EntityAddr(entity_addr) => { - ENTITY_ADDR_TAG.write_bytes(writer)?; - entity_addr.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - InitiatorAddr::PublicKey(public_key) => public_key.serialized_length(), - InitiatorAddr::AccountHash(account_hash) => account_hash.serialized_length(), - InitiatorAddr::EntityAddr(entity_addr) => entity_addr.serialized_length(), - } - } -} - -impl FromBytes for InitiatorAddr { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - PUBLIC_KEY_TAG => { - let (public_key, remainder) = PublicKey::from_bytes(remainder)?; - Ok((InitiatorAddr::PublicKey(public_key), remainder)) - } - ACCOUNT_HASH_TAG => { - let (account_hash, remainder) = AccountHash::from_bytes(remainder)?; - Ok((InitiatorAddr::AccountHash(account_hash), remainder)) - } - 
ENTITY_ADDR_TAG => { - let (entity_addr, remainder) = EntityAddr::from_bytes(remainder)?; - Ok((InitiatorAddr::EntityAddr(entity_addr), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&InitiatorAddr::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/initiator_addr_and_secret_key.rs b/casper_types_ver_2_0/src/transaction/initiator_addr_and_secret_key.rs deleted file mode 100644 index d503e0a8..00000000 --- a/casper_types_ver_2_0/src/transaction/initiator_addr_and_secret_key.rs +++ /dev/null @@ -1,40 +0,0 @@ -use crate::{InitiatorAddr, PublicKey, SecretKey}; - -/// Used when constructing a deploy or transaction. -#[derive(Debug)] -pub(super) enum InitiatorAddrAndSecretKey<'a> { - /// Provides both the initiator address and the secret key (not necessarily for the same - /// initiator address) used to sign the deploy or transaction. - Both { - /// The initiator address of the account. - initiator_addr: InitiatorAddr, - /// The secret key used to sign the deploy or transaction. - secret_key: &'a SecretKey, - }, - /// The initiator address only (no secret key). The deploy or transaction will be created - /// unsigned. - InitiatorAddr(InitiatorAddr), - /// The initiator address will be derived from the provided secret key, and the deploy or - /// transaction will be signed by the same secret key. - SecretKey(&'a SecretKey), -} - -impl<'a> InitiatorAddrAndSecretKey<'a> { - pub fn initiator_addr(&self) -> InitiatorAddr { - match self { - InitiatorAddrAndSecretKey::Both { initiator_addr, .. } - | InitiatorAddrAndSecretKey::InitiatorAddr(initiator_addr) => initiator_addr.clone(), - InitiatorAddrAndSecretKey::SecretKey(secret_key) => { - InitiatorAddr::PublicKey(PublicKey::from(*secret_key)) - } - } - } - - pub fn secret_key(&self) -> Option<&SecretKey> { - match self { - InitiatorAddrAndSecretKey::Both { secret_key, .. } - | InitiatorAddrAndSecretKey::SecretKey(secret_key) => Some(secret_key), - InitiatorAddrAndSecretKey::InitiatorAddr(_) => None, - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/package_identifier.rs b/casper_types_ver_2_0/src/transaction/package_identifier.rs deleted file mode 100644 index 29cdb623..00000000 --- a/casper_types_ver_2_0/src/transaction/package_identifier.rs +++ /dev/null @@ -1,191 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use hex_fmt::HexFmt; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - EntityVersion, PackageHash, -}; -#[cfg(doc)] -use crate::{ExecutableDeployItem, TransactionTarget}; - -const HASH_TAG: u8 = 0; -const NAME_TAG: u8 = 1; - -/// Identifier for the package object within a [`TransactionTarget::Stored`] or an -/// [`ExecutableDeployItem`]. 
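// Sketch, not part of the patch (import paths assumed): one of the three
// initiator forms defined above; the Display impl prints "account-hash(..)"
// and "entity-addr(..)" for the non-public-key variants.
use casper_types_ver_2_0::{account::AccountHash, InitiatorAddr};

fn main() {
    let addr = InitiatorAddr::AccountHash(AccountHash::new([7u8; 32]));
    println!("initiated by {}", addr);
}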
-#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars( - description = "Identifier for the package object within a `Stored` transaction target or \ - an `ExecutableDeployItem`." - ) -)] -pub enum PackageIdentifier { - /// The hash and optional version identifying the contract package. - Hash { - /// The hash of the contract package. - package_hash: PackageHash, - /// The version of the contract package. - /// - /// `None` implies latest version. - version: Option, - }, - /// The name and optional version identifying the contract package. - Name { - /// The name of the contract package. - name: String, - /// The version of the contract package. - /// - /// `None` implies latest version. - version: Option, - }, -} - -impl PackageIdentifier { - /// Returns the optional version of the contract package. - /// - /// `None` implies latest version. - pub fn version(&self) -> Option { - match self { - PackageIdentifier::Hash { version, .. } | PackageIdentifier::Name { version, .. } => { - *version - } - } - } - - /// Returns a random `PackageIdentifier`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let version = rng.gen::().then(|| rng.gen::()); - if rng.gen() { - PackageIdentifier::Hash { - package_hash: PackageHash::new(rng.gen()), - version, - } - } else { - PackageIdentifier::Name { - name: rng.random_string(1..21), - version, - } - } - } -} - -impl Display for PackageIdentifier { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - PackageIdentifier::Hash { - package_hash: contract_package_hash, - version: Some(ver), - } => write!( - formatter, - "package-id({}, version {})", - HexFmt(contract_package_hash), - ver - ), - PackageIdentifier::Hash { - package_hash: contract_package_hash, - .. - } => write!( - formatter, - "package-id({}, latest)", - HexFmt(contract_package_hash), - ), - PackageIdentifier::Name { - name, - version: Some(ver), - } => write!(formatter, "package-id({}, version {})", name, ver), - PackageIdentifier::Name { name, .. 
} => { - write!(formatter, "package-id({}, latest)", name) - } - } - } -} - -impl ToBytes for PackageIdentifier { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PackageIdentifier::Hash { - package_hash, - version, - } => { - HASH_TAG.write_bytes(writer)?; - package_hash.write_bytes(writer)?; - version.write_bytes(writer) - } - PackageIdentifier::Name { name, version } => { - NAME_TAG.write_bytes(writer)?; - name.write_bytes(writer)?; - version.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - PackageIdentifier::Hash { - package_hash, - version, - } => package_hash.serialized_length() + version.serialized_length(), - PackageIdentifier::Name { name, version } => { - name.serialized_length() + version.serialized_length() - } - } - } -} - -impl FromBytes for PackageIdentifier { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - HASH_TAG => { - let (package_hash, remainder) = PackageHash::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let id = PackageIdentifier::Hash { - package_hash, - version, - }; - Ok((id, remainder)) - } - NAME_TAG => { - let (name, remainder) = String::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let id = PackageIdentifier::Name { name, version }; - Ok((id, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - bytesrepr::test_serialization_roundtrip(&PackageIdentifier::random(rng)); - } -} diff --git a/casper_types_ver_2_0/src/transaction/pricing_mode.rs b/casper_types_ver_2_0/src/transaction/pricing_mode.rs deleted file mode 100644 index 97304f03..00000000 --- a/casper_types_ver_2_0/src/transaction/pricing_mode.rs +++ /dev/null @@ -1,121 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const GAS_PRICE_MULTIPLIER_TAG: u8 = 0; -const FIXED_TAG: u8 = 1; -const RESERVED_TAG: u8 = 2; - -/// The pricing mode of a [`Transaction`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Pricing mode of a Transaction.") -)] -#[serde(deny_unknown_fields)] -pub enum PricingMode { - /// Multiplies the gas used by the given amount. - /// - /// This is the same behaviour as for the `Deploy::gas_price`. - GasPriceMultiplier(u64), - /// First-in-first-out handling of transactions, i.e. pricing mode is irrelevant to ordering. - Fixed, - /// The payment for this transaction was previously reserved. - Reserved, -} - -impl PricingMode { - /// Returns a random `PricingMode. 
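// Sketch, not part of the patch (import paths assumed): the two ways of
// identifying a package defined above; `version: None` selects the latest
// enabled version.
use casper_types_ver_2_0::{PackageHash, PackageIdentifier};

fn main() {
    let by_hash = PackageIdentifier::Hash {
        package_hash: PackageHash::new([1u8; 32]),
        version: None,
    };
    let by_name = PackageIdentifier::Name {
        name: "counter-package".to_string(),
        version: Some(2),
    };
    assert_eq!(by_hash.version(), None);
    // Display follows the impl above, e.g. "package-id(0101.., latest)".
    println!("{}\n{}", by_hash, by_name);
}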
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - 0 => PricingMode::GasPriceMultiplier(rng.gen()), - 1 => PricingMode::Fixed, - 2 => PricingMode::Reserved, - _ => unreachable!(), - } - } -} - -impl Display for PricingMode { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - PricingMode::GasPriceMultiplier(multiplier) => { - write!(formatter, "gas price multiplier {}", multiplier) - } - PricingMode::Fixed => write!(formatter, "fixed pricing"), - PricingMode::Reserved => write!(formatter, "reserved"), - } - } -} - -impl ToBytes for PricingMode { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - PricingMode::GasPriceMultiplier(multiplier) => { - GAS_PRICE_MULTIPLIER_TAG.write_bytes(writer)?; - multiplier.write_bytes(writer) - } - PricingMode::Fixed => FIXED_TAG.write_bytes(writer), - PricingMode::Reserved => RESERVED_TAG.write_bytes(writer), - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - PricingMode::GasPriceMultiplier(multiplier) => multiplier.serialized_length(), - PricingMode::Fixed | PricingMode::Reserved => 0, - } - } -} - -impl FromBytes for PricingMode { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - GAS_PRICE_MULTIPLIER_TAG => { - let (multiplier, remainder) = u64::from_bytes(remainder)?; - Ok((PricingMode::GasPriceMultiplier(multiplier), remainder)) - } - FIXED_TAG => Ok((PricingMode::Fixed, remainder)), - RESERVED_TAG => Ok((PricingMode::Reserved, remainder)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&PricingMode::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/runtime_args.rs b/casper_types_ver_2_0/src/transaction/runtime_args.rs deleted file mode 100644 index fd8d4dd8..00000000 --- a/casper_types_ver_2_0/src/transaction/runtime_args.rs +++ /dev/null @@ -1,388 +0,0 @@ -//! Home of RuntimeArgs for calling contracts - -use alloc::{collections::BTreeMap, string::String, vec::Vec}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{Rng, RngCore}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::{bytesrepr::Bytes, testing::TestRng}; -use crate::{ - bytesrepr::{self, Error, FromBytes, ToBytes}, - CLType, CLTyped, CLValue, CLValueError, U512, -}; -/// Named arguments to a contract. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct NamedArg(String, CLValue); - -impl NamedArg { - /// Returns a new `NamedArg`. - pub fn new(name: String, value: CLValue) -> Self { - NamedArg(name, value) - } - - /// Returns the name of the named arg. - pub fn name(&self) -> &str { - &self.0 - } - - /// Returns the value of the named arg. 
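// Sketch, not part of the patch (import path assumed): the three pricing modes
// defined above and their Display output.
use casper_types_ver_2_0::PricingMode;

fn main() {
    let modes = [
        PricingMode::GasPriceMultiplier(3),
        PricingMode::Fixed,
        PricingMode::Reserved,
    ];
    for mode in &modes {
        // Prints "gas price multiplier 3", "fixed pricing", "reserved".
        println!("{}", mode);
    }
}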
- pub fn cl_value(&self) -> &CLValue { - &self.1 - } - - /// Returns a mutable reference to the value of the named arg. - pub fn cl_value_mut(&mut self) -> &mut CLValue { - &mut self.1 - } -} - -impl From<(String, CLValue)> for NamedArg { - fn from((name, value): (String, CLValue)) -> NamedArg { - NamedArg(name, value) - } -} - -impl ToBytes for NamedArg { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() + self.1.serialized_length() - } -} - -impl FromBytes for NamedArg { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (name, remainder) = String::from_bytes(bytes)?; - let (cl_value, remainder) = CLValue::from_bytes(remainder)?; - Ok((NamedArg(name, cl_value), remainder)) - } -} - -/// Represents a collection of arguments passed to a smart contract. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, Debug, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub struct RuntimeArgs(Vec); - -impl RuntimeArgs { - /// Create an empty [`RuntimeArgs`] instance. - pub fn new() -> RuntimeArgs { - RuntimeArgs::default() - } - - /// A wrapper that lets you easily and safely create runtime arguments. - /// - /// This method is useful when you have to construct a [`RuntimeArgs`] with multiple entries, - /// but error handling at given call site would require to have a match statement for each - /// [`RuntimeArgs::insert`] call. With this method you can use ? operator inside the closure and - /// then handle single result. When `try_block` will be stabilized this method could be - /// deprecated in favor of using those blocks. - pub fn try_new(func: F) -> Result - where - F: FnOnce(&mut RuntimeArgs) -> Result<(), CLValueError>, - { - let mut runtime_args = RuntimeArgs::new(); - func(&mut runtime_args)?; - Ok(runtime_args) - } - - /// Gets an argument by its name. - pub fn get(&self, name: &str) -> Option<&CLValue> { - self.0.iter().find_map(|NamedArg(named_name, named_value)| { - if named_name == name { - Some(named_value) - } else { - None - } - }) - } - - /// Gets the length of the collection. - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns `true` if the collection of arguments is empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Inserts a new named argument into the collection. - pub fn insert(&mut self, key: K, value: V) -> Result<(), CLValueError> - where - K: Into, - V: CLTyped + ToBytes, - { - let cl_value = CLValue::from_t(value)?; - self.0.push(NamedArg(key.into(), cl_value)); - Ok(()) - } - - /// Inserts a new named argument into the collection. - pub fn insert_cl_value(&mut self, key: K, cl_value: CLValue) - where - K: Into, - { - self.0.push(NamedArg(key.into(), cl_value)); - } - - /// Returns all the values of the named args. - pub fn to_values(&self) -> Vec<&CLValue> { - self.0.iter().map(|NamedArg(_name, value)| value).collect() - } - - /// Returns an iterator of references over all arguments in insertion order. - pub fn named_args(&self) -> impl Iterator { - self.0.iter() - } - - /// Returns an iterator of mutable references over all arguments in insertion order. 
- pub fn named_args_mut(&mut self) -> impl Iterator { - self.0.iter_mut() - } - - /// Returns the numeric value of `name` arg from the runtime arguments or defaults to - /// 0 if that arg doesn't exist or is not an integer type. - /// - /// Supported [`CLType`]s for numeric conversions are U64, and U512. - /// - /// Returns an error if parsing the arg fails. - pub fn try_get_number(&self, name: &str) -> Result { - let amount_arg = match self.get(name) { - None => return Ok(U512::zero()), - Some(arg) => arg, - }; - match amount_arg.cl_type() { - CLType::U512 => amount_arg.clone().into_t::(), - CLType::U64 => amount_arg.clone().into_t::().map(U512::from), - _ => Ok(U512::zero()), - } - } - - /// Returns a random `RuntimeArgs`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - fn random_bytes(rng: &mut TestRng) -> Bytes { - let mut buffer = vec![0u8; rng.gen_range(0..100)]; - rng.fill_bytes(buffer.as_mut()); - Bytes::from(buffer) - } - - let count = rng.gen_range(0..6); - let mut args = RuntimeArgs::new(); - for _ in 0..count { - let key = rng.random_string(1..21); - let value = random_bytes(rng); - let _ = args.insert(key, value); - } - args - } -} - -impl From> for RuntimeArgs { - fn from(values: Vec) -> Self { - RuntimeArgs(values) - } -} - -impl From> for RuntimeArgs { - fn from(cl_values: BTreeMap) -> RuntimeArgs { - RuntimeArgs(cl_values.into_iter().map(NamedArg::from).collect()) - } -} - -impl From for BTreeMap { - fn from(args: RuntimeArgs) -> BTreeMap { - let mut map = BTreeMap::new(); - for named in args.0 { - map.insert(named.0, named.1); - } - map - } -} - -impl ToBytes for RuntimeArgs { - fn to_bytes(&self) -> Result, Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for RuntimeArgs { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (args, remainder) = Vec::::from_bytes(bytes)?; - Ok((RuntimeArgs(args), remainder)) - } -} - -/// Macro that makes it easier to construct named arguments. -/// -/// NOTE: This macro does not propagate possible errors that could occur while creating a -/// [`CLValue`]. For such cases creating [`RuntimeArgs`] manually is recommended. -/// -/// # Example usage -/// ``` -/// use casper_types_ver_2_0::runtime_args; -/// let _named_args = runtime_args! { -/// "foo" => 42, -/// "bar" => "Hello, world!" -/// }; -/// ``` -#[macro_export] -macro_rules! runtime_args { - () => ($crate::RuntimeArgs::new()); - ( $($key:expr => $value:expr,)+ ) => (runtime_args!($($key => $value),+)); - ( $($key:expr => $value:expr),* ) => { - { - let mut named_args = $crate::RuntimeArgs::new(); - $( - named_args.insert($key, $value).unwrap(); - )* - named_args - } - }; -} - -#[cfg(test)] -mod tests { - use super::*; - - const ARG_AMOUNT: &str = "amount"; - - #[test] - fn test_runtime_args() { - let arg1 = CLValue::from_t(1).unwrap(); - let arg2 = CLValue::from_t("Foo").unwrap(); - let arg3 = CLValue::from_t(Some(1)).unwrap(); - let args = { - let mut map = BTreeMap::new(); - map.insert("bar".into(), arg2.clone()); - map.insert("foo".into(), arg1.clone()); - map.insert("qwer".into(), arg3.clone()); - map - }; - let runtime_args = RuntimeArgs::from(args); - assert_eq!(runtime_args.get("qwer"), Some(&arg3)); - assert_eq!(runtime_args.get("foo"), Some(&arg1)); - assert_eq!(runtime_args.get("bar"), Some(&arg2)); - assert_eq!(runtime_args.get("aaa"), None); - - // Ensure macro works - - let runtime_args_2 = runtime_args! 
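// Sketch, not part of the patch (import paths assumed): fallible construction
// with try_new and reading a numeric arg back with try_get_number, both shown
// above.
use casper_types_ver_2_0::{CLValueError, RuntimeArgs, U512};

fn main() -> Result<(), CLValueError> {
    // `?` works inside the closure; a single Result comes back out.
    let args = RuntimeArgs::try_new(|args| {
        args.insert("amount", U512::from(2_500_000_000u64))?;
        args.insert("id", Some(42u64))?;
        Ok(())
    })?;
    // U64 and U512 args both come back as U512; a missing or non-numeric arg
    // defaults to zero.
    assert_eq!(args.try_get_number("amount")?, U512::from(2_500_000_000u64));
    assert_eq!(args.try_get_number("missing")?, U512::zero());
    Ok(())
}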
{ - "bar" => "Foo", - "foo" => 1i32, - "qwer" => Some(1i32), - }; - assert_eq!(runtime_args, runtime_args_2); - } - - #[test] - fn empty_macro() { - assert_eq!(runtime_args! {}, RuntimeArgs::new()); - } - - #[test] - fn btreemap_compat() { - // This test assumes same serialization format as BTreeMap - let runtime_args_1 = runtime_args! { - "bar" => "Foo", - "foo" => 1i32, - "qwer" => Some(1i32), - }; - let tagless = runtime_args_1.to_bytes().unwrap().to_vec(); - - let mut runtime_args_2 = BTreeMap::new(); - runtime_args_2.insert(String::from("bar"), CLValue::from_t("Foo").unwrap()); - runtime_args_2.insert(String::from("foo"), CLValue::from_t(1i32).unwrap()); - runtime_args_2.insert(String::from("qwer"), CLValue::from_t(Some(1i32)).unwrap()); - - assert_eq!(tagless, runtime_args_2.to_bytes().unwrap()); - } - - #[test] - fn named_serialization_roundtrip() { - let args = runtime_args! { - "foo" => 1i32, - }; - bytesrepr::test_serialization_roundtrip(&args); - } - - #[test] - fn should_create_args_with() { - let res = RuntimeArgs::try_new(|runtime_args| { - runtime_args.insert(String::from("foo"), 123)?; - runtime_args.insert(String::from("bar"), 456)?; - Ok(()) - }); - - let expected = runtime_args! { - "foo" => 123, - "bar" => 456, - }; - assert!(matches!(res, Ok(args) if expected == args)); - } - - #[test] - fn try_get_number_should_work() { - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, 0u64).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, U512::zero()).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let args = RuntimeArgs::new(); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), U512::zero()); - - let hundred = 100u64; - - let mut args = RuntimeArgs::new(); - let input = U512::from(hundred); - args.insert(ARG_AMOUNT, input).expect("is ok"); - assert_eq!(args.try_get_number(ARG_AMOUNT).unwrap(), input); - - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, hundred).expect("is ok"); - assert_eq!( - args.try_get_number(ARG_AMOUNT).unwrap(), - U512::from(hundred) - ); - } - - #[test] - fn try_get_number_should_return_zero_for_non_numeric_type() { - let mut args = RuntimeArgs::new(); - args.insert(ARG_AMOUNT, "Non-numeric-string").unwrap(); - assert_eq!( - args.try_get_number(ARG_AMOUNT).expect("should get amount"), - U512::zero() - ); - } - - #[test] - fn try_get_number_should_return_zero_if_amount_is_missing() { - let args = RuntimeArgs::new(); - assert_eq!( - args.try_get_number(ARG_AMOUNT).expect("should get amount"), - U512::zero() - ); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_approvals_hash.rs b/casper_types_ver_2_0/src/transaction/transaction_approvals_hash.rs deleted file mode 100644 index ed11ee42..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_approvals_hash.rs +++ /dev/null @@ -1,110 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::TransactionV1; -use super::{DeployApprovalsHash, TransactionV1ApprovalsHash}; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -/// A versioned wrapper for a transaction approvals hash or deploy approvals hash. 
-#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub enum TransactionApprovalsHash { - /// A deploy approvals hash. - Deploy(DeployApprovalsHash), - /// A version 1 transaction approvals hash. - #[serde(rename = "Version1")] - V1(TransactionV1ApprovalsHash), -} - -impl From for TransactionApprovalsHash { - fn from(hash: DeployApprovalsHash) -> Self { - Self::Deploy(hash) - } -} - -impl From for TransactionApprovalsHash { - fn from(hash: TransactionV1ApprovalsHash) -> Self { - Self::V1(hash) - } -} - -impl Display for TransactionApprovalsHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionApprovalsHash::Deploy(hash) => Display::fmt(hash, formatter), - TransactionApprovalsHash::V1(hash) => Display::fmt(hash, formatter), - } - } -} - -impl ToBytes for TransactionApprovalsHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionApprovalsHash::Deploy(hash) => { - DEPLOY_TAG.write_bytes(writer)?; - hash.write_bytes(writer) - } - TransactionApprovalsHash::V1(hash) => { - V1_TAG.write_bytes(writer)?; - hash.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionApprovalsHash::Deploy(hash) => hash.serialized_length(), - TransactionApprovalsHash::V1(hash) => hash.serialized_length(), - } - } -} - -impl FromBytes for TransactionApprovalsHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (hash, remainder) = DeployApprovalsHash::from_bytes(remainder)?; - Ok((TransactionApprovalsHash::Deploy(hash), remainder)) - } - V1_TAG => { - let (hash, remainder) = TransactionV1ApprovalsHash::from_bytes(remainder)?; - Ok((TransactionApprovalsHash::V1(hash), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let hash = TransactionApprovalsHash::from(DeployApprovalsHash::random(rng)); - bytesrepr::test_serialization_roundtrip(&hash); - - let hash = TransactionApprovalsHash::from(TransactionV1ApprovalsHash::random(rng)); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_entry_point.rs b/casper_types_ver_2_0/src/transaction/transaction_entry_point.rs deleted file mode 100644 index 45e3afb1..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_entry_point.rs +++ /dev/null @@ -1,232 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const CUSTOM_TAG: u8 = 0; -const TRANSFER_TAG: u8 = 1; -const ADD_BID_TAG: u8 = 2; -const WITHDRAW_BID_TAG: u8 = 3; -const DELEGATE_TAG: u8 = 4; 
-const UNDELEGATE_TAG: u8 = 5; -const REDELEGATE_TAG: u8 = 6; - -/// The entry point of a [`Transaction`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Entry point of a Transaction.") -)] -#[serde(deny_unknown_fields)] -pub enum TransactionEntryPoint { - /// A non-native, arbitrary entry point. - Custom(String), - /// The `transfer` native entry point, used to transfer `Motes` from a source purse to a target - /// purse. - /// - /// Requires the following runtime args: - /// * "source": `URef` - /// * "target": `URef` - /// * "amount": `U512` - /// - /// The following optional runtime args can also be provided: - /// * "to": `Option` - /// * "id": `Option` - #[cfg_attr( - feature = "json-schema", - schemars( - description = "The `transfer` native entry point, used to transfer `Motes` from a \ - source purse to a target purse." - ) - )] - Transfer, - /// The `add_bid` native entry point, used to create or top off a bid purse. - /// - /// Requires the following runtime args: - /// * "public_key": `PublicKey` - /// * "delegation_rate": `u8` - /// * "amount": `U512` - #[cfg_attr( - feature = "json-schema", - schemars( - description = "The `add_bid` native entry point, used to create or top off a bid purse." - ) - )] - AddBid, - /// The `withdraw_bid` native entry point, used to decrease a stake. - /// - /// Requires the following runtime args: - /// * "public_key": `PublicKey` - /// * "amount": `U512` - #[cfg_attr( - feature = "json-schema", - schemars(description = "The `withdraw_bid` native entry point, used to decrease a stake.") - )] - WithdrawBid, - - /// The `delegate` native entry point, used to add a new delegator or increase an existing - /// delegator's stake. - /// - /// Requires the following runtime args: - /// * "delegator": `PublicKey` - /// * "validator": `PublicKey` - /// * "amount": `U512` - #[cfg_attr( - feature = "json-schema", - schemars( - description = "The `delegate` native entry point, used to add a new delegator or \ - increase an existing delegator's stake." - ) - )] - Delegate, - - /// The `undelegate` native entry point, used to reduce a delegator's stake or remove the - /// delegator if the remaining stake is 0. - /// - /// Requires the following runtime args: - /// * "delegator": `PublicKey` - /// * "validator": `PublicKey` - /// * "amount": `U512` - #[cfg_attr( - feature = "json-schema", - schemars( - description = "The `undelegate` native entry point, used to reduce a delegator's \ - stake or remove the delegator if the remaining stake is 0." - ) - )] - Undelegate, - - /// The `redelegate` native entry point, used to reduce a delegator's stake or remove the - /// delegator if the remaining stake is 0, and after the unbonding delay, automatically - /// delegate to a new validator. - /// - /// Requires the following runtime args: - /// * "delegator": `PublicKey` - /// * "validator": `PublicKey` - /// * "amount": `U512` - /// * "new_validator": `PublicKey` - #[cfg_attr( - feature = "json-schema", - schemars( - description = "The `redelegate` native entry point, used to reduce a delegator's stake \ - or remove the delegator if the remaining stake is 0, and after the unbonding delay, \ - automatically delegate to a new validator." - ) - )] - Redelegate, -} - -impl TransactionEntryPoint { - /// Returns a random `TransactionEntryPoint`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..7) { - CUSTOM_TAG => TransactionEntryPoint::Custom(rng.random_string(1..21)), - TRANSFER_TAG => TransactionEntryPoint::Transfer, - ADD_BID_TAG => TransactionEntryPoint::AddBid, - WITHDRAW_BID_TAG => TransactionEntryPoint::WithdrawBid, - DELEGATE_TAG => TransactionEntryPoint::Delegate, - UNDELEGATE_TAG => TransactionEntryPoint::Undelegate, - REDELEGATE_TAG => TransactionEntryPoint::Redelegate, - _ => unreachable!(), - } - } -} - -impl Display for TransactionEntryPoint { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionEntryPoint::Custom(entry_point) => { - write!(formatter, "custom({entry_point})") - } - TransactionEntryPoint::Transfer => write!(formatter, "transfer"), - TransactionEntryPoint::AddBid => write!(formatter, "add_bid"), - TransactionEntryPoint::WithdrawBid => write!(formatter, "withdraw_bid"), - TransactionEntryPoint::Delegate => write!(formatter, "delegate"), - TransactionEntryPoint::Undelegate => write!(formatter, "undelegate"), - TransactionEntryPoint::Redelegate => write!(formatter, "redelegate"), - } - } -} - -impl ToBytes for TransactionEntryPoint { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionEntryPoint::Custom(entry_point) => { - CUSTOM_TAG.write_bytes(writer)?; - entry_point.write_bytes(writer) - } - TransactionEntryPoint::Transfer => TRANSFER_TAG.write_bytes(writer), - TransactionEntryPoint::AddBid => ADD_BID_TAG.write_bytes(writer), - TransactionEntryPoint::WithdrawBid => WITHDRAW_BID_TAG.write_bytes(writer), - TransactionEntryPoint::Delegate => DELEGATE_TAG.write_bytes(writer), - TransactionEntryPoint::Undelegate => UNDELEGATE_TAG.write_bytes(writer), - TransactionEntryPoint::Redelegate => REDELEGATE_TAG.write_bytes(writer), - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionEntryPoint::Custom(entry_point) => entry_point.serialized_length(), - TransactionEntryPoint::Transfer - | TransactionEntryPoint::AddBid - | TransactionEntryPoint::WithdrawBid - | TransactionEntryPoint::Delegate - | TransactionEntryPoint::Undelegate - | TransactionEntryPoint::Redelegate => 0, - } - } -} - -impl FromBytes for TransactionEntryPoint { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - CUSTOM_TAG => { - let (entry_point, remainder) = String::from_bytes(remainder)?; - Ok((TransactionEntryPoint::Custom(entry_point), remainder)) - } - TRANSFER_TAG => Ok((TransactionEntryPoint::Transfer, remainder)), - ADD_BID_TAG => Ok((TransactionEntryPoint::AddBid, remainder)), - WITHDRAW_BID_TAG => Ok((TransactionEntryPoint::WithdrawBid, remainder)), - DELEGATE_TAG => Ok((TransactionEntryPoint::Delegate, remainder)), - UNDELEGATE_TAG => Ok((TransactionEntryPoint::Undelegate, remainder)), - REDELEGATE_TAG => Ok((TransactionEntryPoint::Redelegate, remainder)), - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&TransactionEntryPoint::random(rng)); - } - } -} diff --git 
a/casper_types_ver_2_0/src/transaction/transaction_hash.rs b/casper_types_ver_2_0/src/transaction/transaction_hash.rs deleted file mode 100644 index 7f7d31f9..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_hash.rs +++ /dev/null @@ -1,143 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::TransactionV1; -use super::{DeployHash, TransactionV1Hash}; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -#[cfg(any(feature = "testing", test))] -use rand::Rng; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -/// A versioned wrapper for a transaction hash or deploy hash. -#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub enum TransactionHash { - /// A deploy hash. - Deploy(DeployHash), - /// A version 1 transaction hash. - #[serde(rename = "Version1")] - V1(TransactionV1Hash), -} - -impl TransactionHash { - /// Returns a random `TransactionHash`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..2) { - 0 => TransactionHash::from(DeployHash::random(rng)), - 1 => TransactionHash::from(TransactionV1Hash::random(rng)), - _ => panic!(), - } - } -} - -impl From for TransactionHash { - fn from(hash: DeployHash) -> Self { - Self::Deploy(hash) - } -} - -impl From<&DeployHash> for TransactionHash { - fn from(hash: &DeployHash) -> Self { - Self::from(*hash) - } -} - -impl From for TransactionHash { - fn from(hash: TransactionV1Hash) -> Self { - Self::V1(hash) - } -} - -impl From<&TransactionV1Hash> for TransactionHash { - fn from(hash: &TransactionV1Hash) -> Self { - Self::from(*hash) - } -} - -impl Display for TransactionHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionHash::Deploy(hash) => Display::fmt(hash, formatter), - TransactionHash::V1(hash) => Display::fmt(hash, formatter), - } - } -} - -impl ToBytes for TransactionHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionHash::Deploy(hash) => { - DEPLOY_TAG.write_bytes(writer)?; - hash.write_bytes(writer) - } - TransactionHash::V1(hash) => { - V1_TAG.write_bytes(writer)?; - hash.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionHash::Deploy(hash) => hash.serialized_length(), - TransactionHash::V1(hash) => hash.serialized_length(), - } - } -} - -impl FromBytes for TransactionHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (hash, remainder) = DeployHash::from_bytes(remainder)?; - Ok((TransactionHash::Deploy(hash), remainder)) - } - V1_TAG => { - let (hash, remainder) = TransactionV1Hash::from_bytes(remainder)?; - Ok((TransactionHash::V1(hash), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - 
use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let hash = TransactionHash::from(DeployHash::random(rng)); - bytesrepr::test_serialization_roundtrip(&hash); - - let hash = TransactionHash::from(TransactionV1Hash::random(rng)); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_header.rs b/casper_types_ver_2_0/src/transaction/transaction_header.rs deleted file mode 100644 index d1a864bb..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_header.rs +++ /dev/null @@ -1,116 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -use super::{DeployHeader, TransactionV1Header}; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -/// A versioned wrapper for a transaction header or deploy header. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -pub enum TransactionHeader { - /// A deploy header. - Deploy(DeployHeader), - /// A version 1 transaction header. - #[cfg_attr(any(feature = "std", test), serde(rename = "Version1"))] - V1(TransactionV1Header), -} - -impl From for TransactionHeader { - fn from(hash: DeployHeader) -> Self { - Self::Deploy(hash) - } -} - -impl From for TransactionHeader { - fn from(hash: TransactionV1Header) -> Self { - Self::V1(hash) - } -} - -impl Display for TransactionHeader { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionHeader::Deploy(hash) => Display::fmt(hash, formatter), - TransactionHeader::V1(hash) => Display::fmt(hash, formatter), - } - } -} - -impl ToBytes for TransactionHeader { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionHeader::Deploy(header) => { - DEPLOY_TAG.write_bytes(writer)?; - header.write_bytes(writer) - } - TransactionHeader::V1(header) => { - V1_TAG.write_bytes(writer)?; - header.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionHeader::Deploy(header) => header.serialized_length(), - TransactionHeader::V1(header) => header.serialized_length(), - } - } -} - -impl FromBytes for TransactionHeader { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (header, remainder) = DeployHeader::from_bytes(remainder)?; - Ok((TransactionHeader::Deploy(header), remainder)) - } - V1_TAG => { - let (header, remainder) = TransactionV1Header::from_bytes(remainder)?; - Ok((TransactionHeader::V1(header), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{testing::TestRng, Deploy, TransactionV1}; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let header = 
TransactionHeader::from(Deploy::random(rng).take_header()); - bytesrepr::test_serialization_roundtrip(&header); - - let header = TransactionHeader::from(TransactionV1::random(rng).take_header()); - bytesrepr::test_serialization_roundtrip(&header); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_id.rs b/casper_types_ver_2_0/src/transaction/transaction_id.rs deleted file mode 100644 index 8f9569b9..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_id.rs +++ /dev/null @@ -1,197 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use super::{ - DeployApprovalsHash, DeployHash, TransactionApprovalsHash, TransactionHash, - TransactionV1ApprovalsHash, TransactionV1Hash, -}; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const DEPLOY_TAG: u8 = 0; -const V1_TAG: u8 = 1; - -/// The unique identifier of a [`Transaction`], comprising its [`TransactionHash`] and -/// [`TransactionApprovalsHash`]. -#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub enum TransactionId { - /// A deploy identifier. - Deploy { - /// The deploy hash. - deploy_hash: DeployHash, - /// The deploy's approvals hash. - approvals_hash: DeployApprovalsHash, - }, - /// A version 1 transaction identifier. - #[serde(rename = "Version1")] - V1 { - /// The transaction hash. - transaction_v1_hash: TransactionV1Hash, - /// The transaction's approvals hash. - approvals_hash: TransactionV1ApprovalsHash, - }, -} - -impl TransactionId { - /// Returns a new `TransactionId::Deploy`. - pub fn new_deploy(deploy_hash: DeployHash, approvals_hash: DeployApprovalsHash) -> Self { - TransactionId::Deploy { - deploy_hash, - approvals_hash, - } - } - - /// Returns a new `TransactionId::V1`. - pub fn new_v1( - transaction_v1_hash: TransactionV1Hash, - approvals_hash: TransactionV1ApprovalsHash, - ) -> Self { - TransactionId::V1 { - transaction_v1_hash, - approvals_hash, - } - } - - /// Returns the transaction hash. - pub fn transaction_hash(&self) -> TransactionHash { - match self { - TransactionId::Deploy { deploy_hash, .. } => TransactionHash::from(*deploy_hash), - TransactionId::V1 { - transaction_v1_hash, - .. - } => TransactionHash::from(*transaction_v1_hash), - } - } - - /// Returns the approvals hash. - pub fn approvals_hash(&self) -> TransactionApprovalsHash { - match self { - TransactionId::Deploy { approvals_hash, .. } => { - TransactionApprovalsHash::from(*approvals_hash) - } - TransactionId::V1 { approvals_hash, .. } => { - TransactionApprovalsHash::from(*approvals_hash) - } - } - } - - /// Returns a random `TransactionId`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - if rng.gen() { - return TransactionId::new_deploy( - DeployHash::random(rng), - DeployApprovalsHash::random(rng), - ); - } - TransactionId::new_v1( - TransactionV1Hash::random(rng), - TransactionV1ApprovalsHash::random(rng), - ) - } -} - -impl Display for TransactionId { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "transaction-id({}, {})", - self.transaction_hash(), - self.approvals_hash() - ) - } -} - -impl ToBytes for TransactionId { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionId::Deploy { - deploy_hash, - approvals_hash, - } => { - DEPLOY_TAG.write_bytes(writer)?; - deploy_hash.write_bytes(writer)?; - approvals_hash.write_bytes(writer) - } - TransactionId::V1 { - transaction_v1_hash, - approvals_hash, - } => { - V1_TAG.write_bytes(writer)?; - transaction_v1_hash.write_bytes(writer)?; - approvals_hash.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionId::Deploy { - deploy_hash, - approvals_hash, - } => deploy_hash.serialized_length() + approvals_hash.serialized_length(), - TransactionId::V1 { - transaction_v1_hash, - approvals_hash, - } => transaction_v1_hash.serialized_length() + approvals_hash.serialized_length(), - } - } -} - -impl FromBytes for TransactionId { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - DEPLOY_TAG => { - let (deploy_hash, remainder) = DeployHash::from_bytes(remainder)?; - let (approvals_hash, remainder) = DeployApprovalsHash::from_bytes(remainder)?; - let id = TransactionId::Deploy { - deploy_hash, - approvals_hash, - }; - Ok((id, remainder)) - } - V1_TAG => { - let (transaction_v1_hash, remainder) = TransactionV1Hash::from_bytes(remainder)?; - let (approvals_hash, remainder) = - TransactionV1ApprovalsHash::from_bytes(remainder)?; - let id = TransactionId::V1 { - transaction_v1_hash, - approvals_hash, - }; - Ok((id, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let id = TransactionId::random(rng); - bytesrepr::test_serialization_roundtrip(&id); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_invocation_target.rs b/casper_types_ver_2_0/src/transaction/transaction_invocation_target.rs deleted file mode 100644 index c9a322f3..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_invocation_target.rs +++ /dev/null @@ -1,303 +0,0 @@ -use alloc::{string::String, vec::Vec}; -use core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use hex_fmt::HexFmt; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::AddressableEntityIdentifier; -#[cfg(doc)] -use super::TransactionTarget; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - serde_helpers, AddressableEntityHash, EntityAddr, EntityVersion, PackageAddr, PackageHash, - 
PackageIdentifier, -}; - -const INVOCABLE_ENTITY_TAG: u8 = 0; -const INVOCABLE_ENTITY_ALIAS_TAG: u8 = 1; -const PACKAGE_TAG: u8 = 2; -const PACKAGE_ALIAS_TAG: u8 = 3; - -/// The identifier of a [`TransactionTarget::Stored`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Identifier of a `Stored` transaction target.") -)] -#[serde(deny_unknown_fields)] -pub enum TransactionInvocationTarget { - /// The address identifying the invocable entity. - #[serde(with = "serde_helpers::raw_32_byte_array")] - #[cfg_attr( - feature = "json-schema", - schemars( - with = "String", - description = "Hex-encoded entity address identifying the invocable entity." - ) - )] - InvocableEntity(EntityAddr), // currently needs to be of contract tag variant - /// The alias identifying the invocable entity. - InvocableEntityAlias(String), - /// The address and optional version identifying the package. - Package { - /// The package address. - #[serde(with = "serde_helpers::raw_32_byte_array")] - #[cfg_attr( - feature = "json-schema", - schemars(with = "String", description = "Hex-encoded address of the package.") - )] - addr: PackageAddr, - /// The package version. - /// - /// If `None`, the latest enabled version is implied. - version: Option, - }, - /// The alias and optional version identifying the package. - PackageAlias { - /// The package alias. - alias: String, - /// The package version. - /// - /// If `None`, the latest enabled version is implied. - version: Option, - }, -} - -impl TransactionInvocationTarget { - /// Returns a new `TransactionInvocationTarget::InvocableEntity`. - pub fn new_invocable_entity(addr: EntityAddr) -> Self { - TransactionInvocationTarget::InvocableEntity(addr) - } - - /// Returns a new `TransactionInvocationTarget::InvocableEntityAlias`. - pub fn new_invocable_entity_alias(alias: String) -> Self { - TransactionInvocationTarget::InvocableEntityAlias(alias) - } - - /// Returns a new `TransactionInvocationTarget::Package`. - pub fn new_package(addr: PackageAddr, version: Option) -> Self { - TransactionInvocationTarget::Package { addr, version } - } - - /// Returns a new `TransactionInvocationTarget::PackageAlias`. - pub fn new_package_alias(alias: String, version: Option) -> Self { - TransactionInvocationTarget::PackageAlias { alias, version } - } - - /// Returns the identifier of the addressable entity, if present. - pub fn addressable_entity_identifier(&self) -> Option { - match self { - TransactionInvocationTarget::InvocableEntity(addr) => Some( - AddressableEntityIdentifier::Hash(AddressableEntityHash::new(*addr)), - ), - TransactionInvocationTarget::InvocableEntityAlias(alias) => { - Some(AddressableEntityIdentifier::Name(alias.clone())) - } - TransactionInvocationTarget::Package { .. } - | TransactionInvocationTarget::PackageAlias { .. } => None, - } - } - - /// Returns the identifier of the contract package, if present. 
- pub fn package_identifier(&self) -> Option { - match self { - TransactionInvocationTarget::InvocableEntity(_) - | TransactionInvocationTarget::InvocableEntityAlias(_) => None, - TransactionInvocationTarget::Package { addr, version } => { - Some(PackageIdentifier::Hash { - package_hash: PackageHash::new(*addr), - version: *version, - }) - } - TransactionInvocationTarget::PackageAlias { alias, version } => { - Some(PackageIdentifier::Name { - name: alias.clone(), - version: *version, - }) - } - } - } - - /// Returns a random `TransactionInvocationTarget`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..4) { - INVOCABLE_ENTITY_TAG => TransactionInvocationTarget::InvocableEntity(rng.gen()), - INVOCABLE_ENTITY_ALIAS_TAG => { - TransactionInvocationTarget::InvocableEntityAlias(rng.random_string(1..21)) - } - PACKAGE_TAG => TransactionInvocationTarget::Package { - addr: rng.gen(), - version: rng.gen::().then(|| rng.gen::()), - }, - PACKAGE_ALIAS_TAG => TransactionInvocationTarget::PackageAlias { - alias: rng.random_string(1..21), - version: rng.gen::().then(|| rng.gen::()), - }, - _ => unreachable!(), - } - } -} - -impl Display for TransactionInvocationTarget { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionInvocationTarget::InvocableEntity(addr) => { - write!(formatter, "invocable-entity({:10})", HexFmt(addr)) - } - TransactionInvocationTarget::InvocableEntityAlias(alias) => { - write!(formatter, "invocable-entity({})", alias) - } - TransactionInvocationTarget::Package { - addr, - version: Some(ver), - } => { - write!(formatter, "package({:10}, version {})", HexFmt(addr), ver) - } - TransactionInvocationTarget::Package { - addr, - version: None, - } => { - write!(formatter, "package({:10}, latest)", HexFmt(addr)) - } - TransactionInvocationTarget::PackageAlias { - alias, - version: Some(ver), - } => { - write!(formatter, "package({}, version {})", alias, ver) - } - TransactionInvocationTarget::PackageAlias { - alias, - version: None, - } => { - write!(formatter, "package({}, latest)", alias) - } - } - } -} - -impl Debug for TransactionInvocationTarget { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionInvocationTarget::InvocableEntity(addr) => formatter - .debug_tuple("InvocableEntity") - .field(&HexFmt(addr)) - .finish(), - TransactionInvocationTarget::InvocableEntityAlias(alias) => formatter - .debug_tuple("InvocableEntityAlias") - .field(alias) - .finish(), - TransactionInvocationTarget::Package { addr, version } => formatter - .debug_struct("Package") - .field("addr", &HexFmt(addr)) - .field("version", version) - .finish(), - TransactionInvocationTarget::PackageAlias { alias, version } => formatter - .debug_struct("PackageAlias") - .field("alias", alias) - .field("version", version) - .finish(), - } - } -} - -impl ToBytes for TransactionInvocationTarget { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionInvocationTarget::InvocableEntity(addr) => { - INVOCABLE_ENTITY_TAG.write_bytes(writer)?; - addr.write_bytes(writer) - } - TransactionInvocationTarget::InvocableEntityAlias(alias) => { - INVOCABLE_ENTITY_ALIAS_TAG.write_bytes(writer)?; - alias.write_bytes(writer) - } - TransactionInvocationTarget::Package { addr, version } => { - PACKAGE_TAG.write_bytes(writer)?; - addr.write_bytes(writer)?; - version.write_bytes(writer) - } - TransactionInvocationTarget::PackageAlias { alias, version } => { - 
PACKAGE_ALIAS_TAG.write_bytes(writer)?; - alias.write_bytes(writer)?; - version.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionInvocationTarget::InvocableEntity(addr) => addr.serialized_length(), - TransactionInvocationTarget::InvocableEntityAlias(alias) => { - alias.serialized_length() - } - TransactionInvocationTarget::Package { addr, version } => { - addr.serialized_length() + version.serialized_length() - } - TransactionInvocationTarget::PackageAlias { alias, version } => { - alias.serialized_length() + version.serialized_length() - } - } - } -} - -impl FromBytes for TransactionInvocationTarget { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - INVOCABLE_ENTITY_TAG => { - let (addr, remainder) = EntityAddr::from_bytes(remainder)?; - let target = TransactionInvocationTarget::InvocableEntity(addr); - Ok((target, remainder)) - } - INVOCABLE_ENTITY_ALIAS_TAG => { - let (alias, remainder) = String::from_bytes(remainder)?; - let target = TransactionInvocationTarget::InvocableEntityAlias(alias); - Ok((target, remainder)) - } - PACKAGE_TAG => { - let (addr, remainder) = PackageAddr::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let target = TransactionInvocationTarget::Package { addr, version }; - Ok((target, remainder)) - } - PACKAGE_ALIAS_TAG => { - let (alias, remainder) = String::from_bytes(remainder)?; - let (version, remainder) = Option::::from_bytes(remainder)?; - let target = TransactionInvocationTarget::PackageAlias { alias, version }; - Ok((target, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&TransactionInvocationTarget::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_runtime.rs b/casper_types_ver_2_0/src/transaction/transaction_runtime.rs deleted file mode 100644 index c1fac1ed..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_runtime.rs +++ /dev/null @@ -1,73 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -/// The runtime used to execute a [`Transaction`]. -#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Runtime used to execute a Transaction.") -)] -#[serde(deny_unknown_fields)] -#[repr(u8)] -pub enum TransactionRuntime { - /// The Casper Version 1 Virtual Machine. 
- VmCasperV1, -} - -impl Display for TransactionRuntime { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionRuntime::VmCasperV1 => write!(formatter, "vm-casper-v1"), - } - } -} - -impl ToBytes for TransactionRuntime { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - (*self as u8).write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for TransactionRuntime { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - v if v == TransactionRuntime::VmCasperV1 as u8 => { - Ok((TransactionRuntime::VmCasperV1, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - bytesrepr::test_serialization_roundtrip(&TransactionRuntime::VmCasperV1); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_scheduling.rs b/casper_types_ver_2_0/src/transaction/transaction_scheduling.rs deleted file mode 100644 index 381d358e..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_scheduling.rs +++ /dev/null @@ -1,133 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}, - EraId, Timestamp, -}; - -const STANDARD_TAG: u8 = 0; -const FUTURE_ERA_TAG: u8 = 1; -const FUTURE_TIMESTAMP_TAG: u8 = 2; - -/// The scheduling mode of a [`Transaction`]. -#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Scheduling mode of a Transaction.") -)] -pub enum TransactionScheduling { - /// No special scheduling applied. - Standard, - /// Execution should be scheduled for the specified era. - FutureEra(EraId), - /// Execution should be scheduled for the specified timestamp or later. - FutureTimestamp(Timestamp), -} - -impl TransactionScheduling { - /// Returns a random `TransactionScheduling`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - STANDARD_TAG => TransactionScheduling::Standard, - FUTURE_ERA_TAG => TransactionScheduling::FutureEra(EraId::random(rng)), - FUTURE_TIMESTAMP_TAG => TransactionScheduling::FutureTimestamp(Timestamp::random(rng)), - _ => unreachable!(), - } - } -} - -impl Display for TransactionScheduling { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionScheduling::Standard => write!(formatter, "schedule(standard)"), - TransactionScheduling::FutureEra(era_id) => write!(formatter, "schedule({})", era_id), - TransactionScheduling::FutureTimestamp(timestamp) => { - write!(formatter, "schedule({})", timestamp) - } - } - } -} - -impl ToBytes for TransactionScheduling { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionScheduling::Standard => STANDARD_TAG.write_bytes(writer), - TransactionScheduling::FutureEra(era_id) => { - FUTURE_ERA_TAG.write_bytes(writer)?; - era_id.write_bytes(writer) - } - TransactionScheduling::FutureTimestamp(timestamp) => { - FUTURE_TIMESTAMP_TAG.write_bytes(writer)?; - timestamp.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionScheduling::Standard => 0, - TransactionScheduling::FutureEra(era_id) => era_id.serialized_length(), - TransactionScheduling::FutureTimestamp(timestamp) => timestamp.serialized_length(), - } - } -} - -impl FromBytes for TransactionScheduling { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - STANDARD_TAG => Ok((TransactionScheduling::Standard, remainder)), - FUTURE_ERA_TAG => { - let (era_id, remainder) = EraId::from_bytes(remainder)?; - Ok((TransactionScheduling::FutureEra(era_id), remainder)) - } - FUTURE_TIMESTAMP_TAG => { - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - Ok((TransactionScheduling::FutureTimestamp(timestamp), remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&TransactionScheduling::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_session_kind.rs b/casper_types_ver_2_0/src/transaction/transaction_session_kind.rs deleted file mode 100644 index eabe065a..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_session_kind.rs +++ /dev/null @@ -1,118 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use crate::bytesrepr::{self, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -/// The session kind of a [`Transaction`]. 
-#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Session kind of a Transaction.") -)] -#[serde(deny_unknown_fields)] -#[repr(u8)] -pub enum TransactionSessionKind { - /// A standard (non-special-case) session. - /// - /// This kind of session is not allowed to install or upgrade a stored contract, but can call - /// stored contracts. - Standard = 0, - /// A session which installs a stored contract. - Installer = 1, - /// A session which upgrades a previously-installed stored contract. Such a session must have - /// "package_id: PackageIdentifier" runtime arg present. - Upgrader = 2, - /// A session which doesn't call any stored contracts. - /// - /// This kind of session is not allowed to install or upgrade a stored contract. - Isolated = 3, -} - -impl TransactionSessionKind { - /// Returns a random `TransactionSessionKind`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..4) { - v if v == TransactionSessionKind::Standard as u8 => TransactionSessionKind::Standard, - v if v == TransactionSessionKind::Installer as u8 => TransactionSessionKind::Installer, - v if v == TransactionSessionKind::Upgrader as u8 => TransactionSessionKind::Upgrader, - v if v == TransactionSessionKind::Isolated as u8 => TransactionSessionKind::Isolated, - _ => unreachable!(), - } - } -} - -impl Display for TransactionSessionKind { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionSessionKind::Standard => write!(formatter, "standard"), - TransactionSessionKind::Installer => write!(formatter, "installer"), - TransactionSessionKind::Upgrader => write!(formatter, "upgrader"), - TransactionSessionKind::Isolated => write!(formatter, "isolated"), - } - } -} - -impl ToBytes for TransactionSessionKind { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - (*self as u8).write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for TransactionSessionKind { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - v if v == TransactionSessionKind::Standard as u8 => { - Ok((TransactionSessionKind::Standard, remainder)) - } - v if v == TransactionSessionKind::Installer as u8 => { - Ok((TransactionSessionKind::Installer, remainder)) - } - v if v == TransactionSessionKind::Upgrader as u8 => { - Ok((TransactionSessionKind::Upgrader, remainder)) - } - v if v == TransactionSessionKind::Isolated as u8 => { - Ok((TransactionSessionKind::Isolated, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&TransactionSessionKind::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_target.rs b/casper_types_ver_2_0/src/transaction/transaction_target.rs deleted file mode 100644 index 76516f6e..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_target.rs +++ /dev/null @@ -1,236 +0,0 @@ -use alloc::vec::Vec; -use 
core::fmt::{self, Debug, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{Rng, RngCore}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::Transaction; -use super::{TransactionInvocationTarget, TransactionRuntime, TransactionSessionKind}; -use crate::bytesrepr::{self, Bytes, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; - -const NATIVE_TAG: u8 = 0; -const STORED_TAG: u8 = 1; -const SESSION_TAG: u8 = 2; - -/// The execution target of a [`Transaction`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Execution target of a Transaction.") -)] -#[serde(deny_unknown_fields)] -pub enum TransactionTarget { - /// The execution target is a native operation (e.g. a transfer). - Native, - /// The execution target is a stored entity or package. - Stored { - /// The identifier of the stored execution target. - id: TransactionInvocationTarget, - /// The execution runtime to use. - runtime: TransactionRuntime, - }, - /// The execution target is the included module bytes, i.e. compiled Wasm. - Session { - /// The kind of session. - kind: TransactionSessionKind, - /// The compiled Wasm. - module_bytes: Bytes, - /// The execution runtime to use. - runtime: TransactionRuntime, - }, -} - -impl TransactionTarget { - /// Returns a new `TransactionTarget::Native`. - pub fn new_native() -> Self { - TransactionTarget::Native - } - - /// Returns a new `TransactionTarget::Stored`. - pub fn new_stored(id: TransactionInvocationTarget, runtime: TransactionRuntime) -> Self { - TransactionTarget::Stored { id, runtime } - } - - /// Returns a new `TransactionTarget::Session`. - pub fn new_session( - kind: TransactionSessionKind, - module_bytes: Bytes, - runtime: TransactionRuntime, - ) -> Self { - TransactionTarget::Session { - kind, - module_bytes, - runtime, - } - } - - /// Returns a random `TransactionTarget`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..3) { - NATIVE_TAG => TransactionTarget::Native, - STORED_TAG => TransactionTarget::new_stored( - TransactionInvocationTarget::random(rng), - TransactionRuntime::VmCasperV1, - ), - SESSION_TAG => { - let mut buffer = vec![0u8; rng.gen_range(0..100)]; - rng.fill_bytes(buffer.as_mut()); - TransactionTarget::new_session( - TransactionSessionKind::random(rng), - Bytes::from(buffer), - TransactionRuntime::VmCasperV1, - ) - } - _ => unreachable!(), - } - } -} - -impl Display for TransactionTarget { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionTarget::Native => write!(formatter, "native"), - TransactionTarget::Stored { id, runtime } => { - write!(formatter, "stored({}, {})", id, runtime) - } - TransactionTarget::Session { - kind, - module_bytes, - runtime, - } => write!( - formatter, - "session({}, {} module bytes, {})", - kind, - module_bytes.len(), - runtime - ), - } - } -} - -impl Debug for TransactionTarget { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - match self { - TransactionTarget::Native => formatter.debug_struct("Native").finish(), - TransactionTarget::Stored { id, runtime } => formatter - .debug_struct("Stored") - .field("id", id) - .field("runtime", runtime) - .finish(), - TransactionTarget::Session { - kind, - module_bytes, - runtime, - } => { - struct BytesLen(usize); - impl Debug for BytesLen { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!(formatter, "{} bytes", self.0) - } - } - - formatter - .debug_struct("Session") - .field("kind", kind) - .field("module_bytes", &BytesLen(module_bytes.len())) - .field("runtime", runtime) - .finish() - } - } - } -} - -impl ToBytes for TransactionTarget { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - TransactionTarget::Native => NATIVE_TAG.write_bytes(writer), - TransactionTarget::Stored { id, runtime } => { - STORED_TAG.write_bytes(writer)?; - id.write_bytes(writer)?; - runtime.write_bytes(writer) - } - TransactionTarget::Session { - kind, - module_bytes, - runtime, - } => { - SESSION_TAG.write_bytes(writer)?; - kind.write_bytes(writer)?; - module_bytes.write_bytes(writer)?; - runtime.write_bytes(writer) - } - } - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - U8_SERIALIZED_LENGTH - + match self { - TransactionTarget::Native => 0, - TransactionTarget::Stored { id, runtime } => { - id.serialized_length() + runtime.serialized_length() - } - TransactionTarget::Session { - kind, - module_bytes, - runtime, - } => { - kind.serialized_length() - + module_bytes.serialized_length() - + runtime.serialized_length() - } - } - } -} - -impl FromBytes for TransactionTarget { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - match tag { - NATIVE_TAG => Ok((TransactionTarget::Native, remainder)), - STORED_TAG => { - let (id, remainder) = TransactionInvocationTarget::from_bytes(remainder)?; - let (runtime, remainder) = TransactionRuntime::from_bytes(remainder)?; - let target = TransactionTarget::new_stored(id, runtime); - Ok((target, remainder)) - } - SESSION_TAG => { - let (kind, remainder) = TransactionSessionKind::from_bytes(remainder)?; - let (module_bytes, remainder) = 
Bytes::from_bytes(remainder)?; - let (runtime, remainder) = TransactionRuntime::from_bytes(remainder)?; - let target = TransactionTarget::new_session(kind, module_bytes, runtime); - Ok((target, remainder)) - } - _ => Err(bytesrepr::Error::Formatting), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - for _ in 0..10 { - bytesrepr::test_serialization_roundtrip(&TransactionTarget::random(rng)); - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1.rs b/casper_types_ver_2_0/src/transaction/transaction_v1.rs deleted file mode 100644 index b8bb9f7f..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1.rs +++ /dev/null @@ -1,809 +0,0 @@ -mod errors_v1; -mod finalized_transaction_v1_approvals; -mod transaction_v1_approval; -mod transaction_v1_approvals_hash; -mod transaction_v1_body; -#[cfg(any(feature = "std", test))] -mod transaction_v1_builder; -mod transaction_v1_hash; -mod transaction_v1_header; - -#[cfg(any(feature = "std", test))] -use alloc::string::ToString; -use alloc::{collections::BTreeSet, vec::Vec}; -use core::{ - cmp, - fmt::{self, Debug, Display, Formatter}, - hash, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "once_cell", test))] -use once_cell::sync::OnceCell; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; -use tracing::debug; - -#[cfg(any(feature = "std", test))] -use super::InitiatorAddrAndSecretKey; -use super::{ - InitiatorAddr, PricingMode, TransactionEntryPoint, TransactionScheduling, TransactionTarget, -}; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -#[cfg(any(feature = "std", test))] -use crate::TransactionConfig; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - crypto, Digest, DisplayIter, RuntimeArgs, SecretKey, TimeDiff, Timestamp, -}; -pub use errors_v1::{ - DecodeFromJsonErrorV1 as TransactionV1DecodeFromJsonError, ErrorV1 as TransactionV1Error, - ExcessiveSizeErrorV1 as TransactionV1ExcessiveSizeError, TransactionV1ConfigFailure, -}; -pub use finalized_transaction_v1_approvals::FinalizedTransactionV1Approvals; -pub use transaction_v1_approval::TransactionV1Approval; -pub use transaction_v1_approvals_hash::TransactionV1ApprovalsHash; -pub use transaction_v1_body::TransactionV1Body; -#[cfg(any(feature = "std", test))] -pub use transaction_v1_builder::{TransactionV1Builder, TransactionV1BuilderError}; -pub use transaction_v1_hash::TransactionV1Hash; -pub use transaction_v1_header::TransactionV1Header; - -/// A unit of work sent by a client to the network, which when executed can cause global state to -/// be altered. -/// -/// To construct a new `TransactionV1`, use a [`TransactionV1Builder`]. -#[derive(Clone, Eq, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars( - description = "A unit of work sent by a client to the network, which when executed can \ - cause global state to be altered." 
- ) -)] -pub struct TransactionV1 { - hash: TransactionV1Hash, - header: TransactionV1Header, - body: TransactionV1Body, - approvals: BTreeSet, - #[cfg_attr(any(all(feature = "std", feature = "once_cell"), test), serde(skip))] - #[cfg_attr( - all(any(feature = "once_cell", test), feature = "datasize"), - data_size(skip) - )] - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell>, -} - -impl TransactionV1 { - /// Called by the `TransactionBuilder` to construct a new `TransactionV1`. - #[cfg(any(feature = "std", test))] - pub(super) fn build( - chain_name: String, - timestamp: Timestamp, - ttl: TimeDiff, - body: TransactionV1Body, - pricing_mode: PricingMode, - payment_amount: Option, - initiator_addr_and_secret_key: InitiatorAddrAndSecretKey, - ) -> TransactionV1 { - let initiator_addr = initiator_addr_and_secret_key.initiator_addr(); - let body_hash = Digest::hash( - body.to_bytes() - .unwrap_or_else(|error| panic!("should serialize body: {}", error)), - ); - let header = TransactionV1Header::new( - chain_name, - timestamp, - ttl, - body_hash, - pricing_mode, - payment_amount, - initiator_addr, - ); - - let hash = header.compute_hash(); - let mut transaction = TransactionV1 { - hash, - header, - body, - approvals: BTreeSet::new(), - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - }; - - if let Some(secret_key) = initiator_addr_and_secret_key.secret_key() { - transaction.sign(secret_key); - } - transaction - } - - /// Returns the hash identifying this transaction. - pub fn hash(&self) -> &TransactionV1Hash { - &self.hash - } - - /// Returns the name of the chain the transaction should be executed on. - pub fn chain_name(&self) -> &str { - self.header.chain_name() - } - - /// Returns the creation timestamp of the transaction. - pub fn timestamp(&self) -> Timestamp { - self.header.timestamp() - } - - /// Returns the duration after the creation timestamp for which the transaction will stay valid. - /// - /// After this duration has ended, the transaction will be considered expired. - pub fn ttl(&self) -> TimeDiff { - self.header.ttl() - } - - /// Returns `true` if the transaction has expired. - pub fn expired(&self, current_instant: Timestamp) -> bool { - self.header.expired(current_instant) - } - - /// Returns the pricing mode for the transaction. - pub fn pricing_mode(&self) -> &PricingMode { - self.header.pricing_mode() - } - - /// Returns the payment amount for the transaction. - pub fn payment_amount(&self) -> Option { - self.header.payment_amount() - } - - /// Returns the address of the initiator of the transaction. - pub fn initiator_addr(&self) -> &InitiatorAddr { - self.header.initiator_addr() - } - - /// Returns a reference to the header of this transaction. - pub fn header(&self) -> &TransactionV1Header { - &self.header - } - - /// Consumes `self`, returning the header of this transaction. - pub fn take_header(self) -> TransactionV1Header { - self.header - } - - /// Returns the runtime args of the transaction. - pub fn args(&self) -> &RuntimeArgs { - self.body.args() - } - - /// Returns the target of the transaction. - pub fn target(&self) -> &TransactionTarget { - self.body.target() - } - - /// Returns the entry point of the transaction. - pub fn entry_point(&self) -> &TransactionEntryPoint { - self.body.entry_point() - } - - /// Returns the scheduling kind of the transaction. - pub fn scheduling(&self) -> &TransactionScheduling { - self.body.scheduling() - } - - /// Returns the body of this transaction. 
- pub fn body(&self) -> &TransactionV1Body { - &self.body - } - - /// Returns the approvals for this transaction. - pub fn approvals(&self) -> &BTreeSet { - &self.approvals - } - - /// Adds a signature of this transaction's hash to its approvals. - pub fn sign(&mut self, secret_key: &SecretKey) { - let approval = TransactionV1Approval::create(&self.hash, secret_key); - self.approvals.insert(approval); - } - - /// Returns the `TransactionV1ApprovalsHash` of this transaction's approvals. - pub fn compute_approvals_hash(&self) -> Result { - TransactionV1ApprovalsHash::compute(&self.approvals) - } - - /// Returns `true` if the serialized size of the transaction is not greater than - /// `max_transaction_size`. - #[cfg(any(feature = "std", test))] - fn is_valid_size( - &self, - max_transaction_size: u32, - ) -> Result<(), TransactionV1ExcessiveSizeError> { - let actual_transaction_size = self.serialized_length(); - if actual_transaction_size > max_transaction_size as usize { - return Err(TransactionV1ExcessiveSizeError { - max_transaction_size, - actual_transaction_size, - }); - } - Ok(()) - } - - /// Returns `Ok` if and only if this transaction's body hashes to the value of `body_hash()`, - /// and if this transaction's header hashes to the value claimed as the transaction hash. - pub fn has_valid_hash(&self) -> Result<(), TransactionV1ConfigFailure> { - let body_hash = Digest::hash( - self.body - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize body: {}", error)), - ); - if body_hash != *self.header.body_hash() { - debug!(?self, ?body_hash, "invalid transaction body hash"); - return Err(TransactionV1ConfigFailure::InvalidBodyHash); - } - - let hash = TransactionV1Hash::new(Digest::hash( - self.header - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize header: {}", error)), - )); - if hash != self.hash { - debug!(?self, ?hash, "invalid transaction hash"); - return Err(TransactionV1ConfigFailure::InvalidTransactionHash); - } - Ok(()) - } - - /// Returns `Ok` if and only if: - /// * the transaction hash is correct (see [`TransactionV1::has_valid_hash`] for details) - /// * approvals are non empty, and - /// * all approvals are valid signatures of the signed hash - pub fn verify(&self) -> Result<(), TransactionV1ConfigFailure> { - #[cfg(any(feature = "once_cell", test))] - return self.is_verified.get_or_init(|| self.do_verify()).clone(); - - #[cfg(not(any(feature = "once_cell", test)))] - self.do_verify() - } - - fn do_verify(&self) -> Result<(), TransactionV1ConfigFailure> { - if self.approvals.is_empty() { - debug!(?self, "transaction has no approvals"); - return Err(TransactionV1ConfigFailure::EmptyApprovals); - } - - self.has_valid_hash()?; - - for (index, approval) in self.approvals.iter().enumerate() { - if let Err(error) = crypto::verify(self.hash, approval.signature(), approval.signer()) { - debug!( - ?self, - "failed to verify transaction approval {}: {}", index, error - ); - return Err(TransactionV1ConfigFailure::InvalidApproval { index, error }); - } - } - - Ok(()) - } - - /// Returns `Ok` if and only if: - /// * the chain_name is correct, - /// * the configured parameters are complied with at the given timestamp - #[cfg(any(feature = "std", test))] - pub fn is_config_compliant( - &self, - chain_name: &str, - config: &TransactionConfig, - max_associated_keys: u32, - timestamp_leeway: TimeDiff, - at: Timestamp, - ) -> Result<(), TransactionV1ConfigFailure> { - self.is_valid_size(config.max_transaction_size)?; - - let header = self.header(); - if 
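
The approval set above is what `verify` checks: every entry must be a valid signature over the transaction hash, and the body/header hash chain must be intact. A sketch of signing with an additional key and re-verifying; the `testing` feature and crate-root paths are assumptions:

use casper_types::{testing::TestRng, SecretKey, TransactionV1};

fn sign_and_verify() {
    let rng = &mut TestRng::new();
    let mut transaction = TransactionV1::random(rng);

    // Add a second approval on top of the initiator's signature.
    let extra_key = SecretKey::random(rng);
    transaction.sign(&extra_key);

    // Fails on an empty approval set, a body/header hash mismatch, or any
    // signature that does not verify against the transaction hash.
    transaction.verify().expect("all approvals should be valid");

    // Commits to the exact approval set; used later for finalized approvals.
    let approvals_hash = transaction
        .compute_approvals_hash()
        .expect("approvals should serialize");
    println!("{} approvals, hash {}", transaction.approvals().len(), approvals_hash);
}
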
header.chain_name() != chain_name { - debug!( - transaction_hash = %self.hash(), - transaction_header = %header, - chain_name = %header.chain_name(), - "invalid chain identifier" - ); - return Err(TransactionV1ConfigFailure::InvalidChainName { - expected: chain_name.to_string(), - got: header.chain_name().to_string(), - }); - } - - header.is_valid(config, timestamp_leeway, at, &self.hash)?; - - if self.approvals.len() > max_associated_keys as usize { - debug!( - transaction_hash = %self.hash(), - number_of_approvals = %self.approvals.len(), - max_associated_keys = %max_associated_keys, - "number of transaction approvals exceeds the limit" - ); - return Err(TransactionV1ConfigFailure::ExcessiveApprovals { - got: self.approvals.len() as u32, - max_associated_keys, - }); - } - - if let Some(payment) = self.payment_amount() { - if payment > config.block_gas_limit { - debug!( - amount = %payment, - block_gas_limit = %config.block_gas_limit, - "payment amount exceeds block gas limit" - ); - return Err(TransactionV1ConfigFailure::ExceedsBlockGasLimit { - block_gas_limit: config.block_gas_limit, - got: payment, - }); - } - } - - self.body.is_valid(config) - } - - // This method is not intended to be used by third party crates. - // - // It is required to allow finalized approvals to be injected after reading a transaction from - // storage. - #[doc(hidden)] - pub fn with_approvals(mut self, approvals: BTreeSet) -> Self { - self.approvals = approvals; - self - } - - /// Returns a random, valid but possibly expired transaction. - /// - /// Note that the [`TransactionV1Builder`] can be used to create a random transaction with - /// more specific values. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random(rng: &mut TestRng) -> Self { - TransactionV1Builder::new_random(rng).build().unwrap() - } - - /// Turns `self` into an invalid transaction by clearing the `chain_name`, invalidating the - /// transaction header hash. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn invalidate(&mut self) { - self.header.invalidate(); - } - - /// Used by the `TestTransactionV1Builder` to inject invalid approvals for testing purposes. - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub(super) fn apply_approvals(&mut self, approvals: Vec) { - self.approvals.extend(approvals); - } -} - -impl hash::Hash for TransactionV1 { - fn hash(&self, state: &mut H) { - // Destructure to make sure we don't accidentally omit fields. - let TransactionV1 { - hash, - header, - body, - approvals, - #[cfg(any(feature = "once_cell", test))] - is_verified: _, - } = self; - hash.hash(state); - header.hash(state); - body.hash(state); - approvals.hash(state); - } -} - -impl PartialEq for TransactionV1 { - fn eq(&self, other: &TransactionV1) -> bool { - // Destructure to make sure we don't accidentally omit fields. - let TransactionV1 { - hash, - header, - body, - approvals, - #[cfg(any(feature = "once_cell", test))] - is_verified: _, - } = self; - *hash == other.hash - && *header == other.header - && *body == other.body - && *approvals == other.approvals - } -} - -impl Ord for TransactionV1 { - fn cmp(&self, other: &TransactionV1) -> cmp::Ordering { - // Destructure to make sure we don't accidentally omit fields. 
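
`is_config_compliant` is the cheap, chainspec-driven acceptance check a node runs before the signature checks in `verify`. A sketch of calling it with default limits; the concrete `max_associated_keys` and leeway values are illustrative, and the `testing` feature plus crate-root paths are assumptions:

use casper_types::{testing::TestRng, TimeDiff, TransactionConfig, TransactionV1Builder};

fn acceptance_check() {
    let rng = &mut TestRng::new();
    let config = TransactionConfig::default();
    let transaction = TransactionV1Builder::new_random(rng)
        .with_chain_name("casper-example")
        .build()
        .unwrap();

    transaction
        .is_config_compliant(
            "casper-example",          // expected chain name
            &config,                   // size, TTL and gas limits
            100,                       // max_associated_keys (chainspec value, illustrative)
            TimeDiff::from_seconds(5), // allowed clock drift
            transaction.timestamp(),   // validation time
        )
        .expect("a freshly built transaction should be acceptable");
}
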
- let TransactionV1 { - hash, - header, - body, - approvals, - #[cfg(any(feature = "once_cell", test))] - is_verified: _, - } = self; - hash.cmp(&other.hash) - .then_with(|| header.cmp(&other.header)) - .then_with(|| body.cmp(&other.body)) - .then_with(|| approvals.cmp(&other.approvals)) - } -} - -impl PartialOrd for TransactionV1 { - fn partial_cmp(&self, other: &TransactionV1) -> Option { - Some(self.cmp(other)) - } -} - -impl ToBytes for TransactionV1 { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.hash.write_bytes(writer)?; - self.header.write_bytes(writer)?; - self.body.write_bytes(writer)?; - self.approvals.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.hash.serialized_length() - + self.header.serialized_length() - + self.body.serialized_length() - + self.approvals.serialized_length() - } -} - -impl FromBytes for TransactionV1 { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (hash, remainder) = TransactionV1Hash::from_bytes(bytes)?; - let (header, remainder) = TransactionV1Header::from_bytes(remainder)?; - let (body, remainder) = TransactionV1Body::from_bytes(remainder)?; - let (approvals, remainder) = BTreeSet::::from_bytes(remainder)?; - let transaction = TransactionV1 { - hash, - header, - body, - approvals, - #[cfg(any(feature = "once_cell", test))] - is_verified: OnceCell::new(), - }; - Ok((transaction, remainder)) - } -} - -impl Display for TransactionV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "transaction-v1[{}, {}, approvals: {}]", - self.header, - self.body, - DisplayIter::new(self.approvals.iter()) - ) - } -} - -#[cfg(test)] -mod tests { - use std::time::Duration; - - use super::*; - - const MAX_ASSOCIATED_KEYS: u32 = 5; - - #[test] - fn json_roundtrip() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1::random(rng); - let json_string = serde_json::to_string_pretty(&transaction).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(transaction, decoded); - } - - #[test] - fn bincode_roundtrip() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1::random(rng); - let serialized = bincode::serialize(&transaction).unwrap(); - let deserialized = bincode::deserialize(&serialized).unwrap(); - assert_eq!(transaction, deserialized); - } - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1::random(rng); - bytesrepr::test_serialization_roundtrip(transaction.header()); - bytesrepr::test_serialization_roundtrip(&transaction); - } - - #[test] - fn is_valid() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1::random(rng); - assert_eq!( - transaction.is_verified.get(), - None, - "is_verified should initially be None" - ); - transaction.verify().expect("should verify"); - assert_eq!( - transaction.is_verified.get(), - Some(&Ok(())), - "is_verified should be true" - ); - } - - fn check_is_not_valid( - invalid_transaction: TransactionV1, - expected_error: TransactionV1ConfigFailure, - ) { - assert!( - invalid_transaction.is_verified.get().is_none(), - "is_verified should initially be None" - ); - let actual_error = invalid_transaction.verify().unwrap_err(); - - // Ignore the `error_msg` field of `InvalidApproval` when comparing to expected error, as - // 
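
The `ToBytes`/`FromBytes` impls above define the wire format as the plain concatenation of hash, header, body and approvals, so a bytesrepr round trip must reproduce the transaction exactly (the property the `bytesrepr_roundtrip` test exercises). A sketch, assuming the `testing` feature:

use casper_types::{
    bytesrepr::{FromBytes, ToBytes},
    testing::TestRng,
    TransactionV1,
};

fn wire_roundtrip() {
    let rng = &mut TestRng::new();
    let transaction = TransactionV1::random(rng);

    let bytes = transaction.to_bytes().expect("should serialize");
    assert_eq!(bytes.len(), transaction.serialized_length());

    let (decoded, remainder) = TransactionV1::from_bytes(&bytes).expect("should deserialize");
    assert!(remainder.is_empty());
    assert_eq!(decoded, transaction);
}
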
this makes the test too fragile. Otherwise expect the actual error should exactly match - // the expected error. - match expected_error { - TransactionV1ConfigFailure::InvalidApproval { - index: expected_index, - .. - } => match actual_error { - TransactionV1ConfigFailure::InvalidApproval { - index: actual_index, - .. - } => { - assert_eq!(actual_index, expected_index); - } - _ => panic!("expected {}, got: {}", expected_error, actual_error), - }, - _ => { - assert_eq!(actual_error, expected_error,); - } - } - - // The actual error should have been lazily initialized correctly. - assert_eq!( - invalid_transaction.is_verified.get(), - Some(&Err(actual_error)), - "is_verified should now be Some" - ); - } - - #[test] - fn not_valid_due_to_invalid_transaction_hash() { - let rng = &mut TestRng::new(); - let mut transaction = TransactionV1::random(rng); - - transaction.invalidate(); - check_is_not_valid( - transaction, - TransactionV1ConfigFailure::InvalidTransactionHash, - ); - } - - #[test] - fn not_valid_due_to_empty_approvals() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1Builder::new_random(rng) - .with_no_secret_key() - .build() - .unwrap(); - assert!(transaction.approvals.is_empty()); - check_is_not_valid(transaction, TransactionV1ConfigFailure::EmptyApprovals) - } - - #[test] - fn not_valid_due_to_invalid_approval() { - let rng = &mut TestRng::new(); - let transaction = TransactionV1Builder::new_random(rng) - .with_invalid_approval(rng) - .build() - .unwrap(); - - // The expected index for the invalid approval will be the first index at which there is an - // approval where the signer is not the account holder. - let account_holder = match transaction.initiator_addr() { - InitiatorAddr::PublicKey(public_key) => public_key.clone(), - InitiatorAddr::AccountHash(_) | InitiatorAddr::EntityAddr(_) => unreachable!(), - }; - let expected_index = transaction - .approvals - .iter() - .enumerate() - .find(|(_, approval)| approval.signer() != &account_holder) - .map(|(index, _)| index) - .unwrap(); - check_is_not_valid( - transaction, - TransactionV1ConfigFailure::InvalidApproval { - index: expected_index, - error: crypto::Error::SignatureError, // This field is ignored in the check. 
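
As the empty-approvals test shows, a transaction built without a secret key carries no approvals and is rejected by `verify` before any signature work is done. The same check, written as a caller would see it (again assuming the `testing` feature):

use casper_types::{testing::TestRng, TransactionV1Builder, TransactionV1ConfigFailure};

fn reject_unsigned() {
    let rng = &mut TestRng::new();
    let unsigned = TransactionV1Builder::new_random(rng)
        .with_no_secret_key()
        .build()
        .unwrap();

    assert!(matches!(
        unsigned.verify(),
        Err(TransactionV1ConfigFailure::EmptyApprovals)
    ));
}
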
- }, - ); - } - - #[test] - fn is_config_compliant() { - let rng = &mut TestRng::new(); - let chain_name = "net-1"; - let transaction = TransactionV1Builder::new_random(rng) - .with_chain_name(chain_name) - .build() - .unwrap(); - - let transaction_config = TransactionConfig::default(); - let current_timestamp = transaction.timestamp(); - transaction - .is_config_compliant( - chain_name, - &transaction_config, - MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp, - ) - .expect("should be acceptable"); - } - - #[test] - fn not_acceptable_due_to_invalid_chain_name() { - let rng = &mut TestRng::new(); - let expected_chain_name = "net-1"; - let wrong_chain_name = "net-2"; - let transaction_config = TransactionConfig::default(); - - let transaction = TransactionV1Builder::new_random(rng) - .with_chain_name(wrong_chain_name) - .build() - .unwrap(); - - let expected_error = TransactionV1ConfigFailure::InvalidChainName { - expected: expected_chain_name.to_string(), - got: wrong_chain_name.to_string(), - }; - - let current_timestamp = transaction.timestamp(); - assert_eq!( - transaction.is_config_compliant( - expected_chain_name, - &transaction_config, - MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - transaction.is_verified.get().is_none(), - "transaction should not have run expensive `is_verified` call" - ); - } - - #[test] - fn not_acceptable_due_to_excessive_ttl() { - let rng = &mut TestRng::new(); - let chain_name = "net-1"; - let transaction_config = TransactionConfig::default(); - let ttl = transaction_config.max_ttl + TimeDiff::from(Duration::from_secs(1)); - let transaction = TransactionV1Builder::new_random(rng) - .with_ttl(ttl) - .with_chain_name(chain_name) - .build() - .unwrap(); - - let expected_error = TransactionV1ConfigFailure::ExcessiveTimeToLive { - max_ttl: transaction_config.max_ttl, - got: ttl, - }; - - let current_timestamp = transaction.timestamp(); - assert_eq!( - transaction.is_config_compliant( - chain_name, - &transaction_config, - MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - transaction.is_verified.get().is_none(), - "transaction should not have run expensive `is_verified` call" - ); - } - - #[test] - fn not_acceptable_due_to_timestamp_in_future() { - let rng = &mut TestRng::new(); - let chain_name = "net-1"; - let transaction_config = TransactionConfig::default(); - let leeway = TimeDiff::from_seconds(2); - - let transaction = TransactionV1Builder::new_random(rng) - .with_chain_name(chain_name) - .build() - .unwrap(); - let current_timestamp = transaction.timestamp() - leeway - TimeDiff::from_seconds(1); - - let expected_error = TransactionV1ConfigFailure::TimestampInFuture { - validation_timestamp: current_timestamp, - timestamp_leeway: leeway, - got: transaction.timestamp(), - }; - - assert_eq!( - transaction.is_config_compliant( - chain_name, - &transaction_config, - MAX_ASSOCIATED_KEYS, - leeway, - current_timestamp - ), - Err(expected_error) - ); - assert!( - transaction.is_verified.get().is_none(), - "transaction should not have run expensive `is_verified` call" - ); - } - - #[test] - fn not_acceptable_due_to_excessive_approvals() { - let rng = &mut TestRng::new(); - let chain_name = "net-1"; - let transaction_config = TransactionConfig::default(); - let mut transaction = TransactionV1Builder::new_random(rng) - .with_chain_name(chain_name) - .build() - .unwrap(); - - for _ in 0..MAX_ASSOCIATED_KEYS { - 
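
The config-compliance tests above each trip one chainspec limit. For example, a time-to-live above `max_ttl` is reported as `ExcessiveTimeToLive` before any signatures are checked; a sketch mirroring the TTL test (values and feature flags assumed as before):

use casper_types::{
    testing::TestRng, TimeDiff, TransactionConfig, TransactionV1Builder, TransactionV1ConfigFailure,
};

fn reject_excessive_ttl() {
    let rng = &mut TestRng::new();
    let config = TransactionConfig::default();
    let too_long = config.max_ttl + TimeDiff::from_seconds(1);

    let transaction = TransactionV1Builder::new_random(rng)
        .with_ttl(too_long)
        .with_chain_name("casper-example")
        .build()
        .unwrap();

    let result = transaction.is_config_compliant(
        "casper-example",
        &config,
        100,
        TimeDiff::default(),
        transaction.timestamp(),
    );
    assert!(matches!(
        result,
        Err(TransactionV1ConfigFailure::ExcessiveTimeToLive { .. })
    ));
}
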
transaction.sign(&SecretKey::random(rng)); - } - - let current_timestamp = transaction.timestamp(); - - let expected_error = TransactionV1ConfigFailure::ExcessiveApprovals { - got: MAX_ASSOCIATED_KEYS + 1, - max_associated_keys: MAX_ASSOCIATED_KEYS, - }; - - assert_eq!( - transaction.is_config_compliant( - chain_name, - &transaction_config, - MAX_ASSOCIATED_KEYS, - TimeDiff::default(), - current_timestamp - ), - Err(expected_error) - ); - assert!( - transaction.is_verified.get().is_none(), - "transaction should not have run expensive `is_verified` call" - ); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/errors_v1.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/errors_v1.rs deleted file mode 100644 index d41cedc0..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/errors_v1.rs +++ /dev/null @@ -1,386 +0,0 @@ -use alloc::string::String; -use core::{ - array::TryFromSliceError, - fmt::{self, Display, Formatter}, -}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use serde::Serialize; - -use super::super::TransactionEntryPoint; -#[cfg(doc)] -use super::TransactionV1; -use crate::{crypto, CLType, TimeDiff, Timestamp, U512}; - -/// Returned when a [`TransactionV1`] fails validation. -#[derive(Clone, Eq, PartialEq, Debug)] -#[cfg_attr(feature = "std", derive(Serialize))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[non_exhaustive] -pub enum TransactionV1ConfigFailure { - /// Invalid chain name. - InvalidChainName { - /// The expected chain name. - expected: String, - /// The transaction's chain name. - got: String, - }, - - /// Transaction is too large. - ExcessiveSize(ExcessiveSizeErrorV1), - - /// Excessive time-to-live. - ExcessiveTimeToLive { - /// The time-to-live limit. - max_ttl: TimeDiff, - /// The transaction's time-to-live. - got: TimeDiff, - }, - - /// Transaction's timestamp is in the future. - TimestampInFuture { - /// The node's timestamp when validating the transaction. - validation_timestamp: Timestamp, - /// Any configured leeway added to `validation_timestamp`. - timestamp_leeway: TimeDiff, - /// The transaction's timestamp. - got: Timestamp, - }, - - /// The provided body hash does not match the actual hash of the body. - InvalidBodyHash, - - /// The provided transaction hash does not match the actual hash of the transaction. - InvalidTransactionHash, - - /// The transaction has no approvals. - EmptyApprovals, - - /// Invalid approval. - InvalidApproval { - /// The index of the approval at fault. - index: usize, - /// The approval verification error. - error: crypto::Error, - }, - - /// Excessive length of transaction's runtime args. - ExcessiveArgsLength { - /// The byte size limit of runtime arguments. - max_length: usize, - /// The length of the transaction's runtime arguments. - got: usize, - }, - - /// The amount of approvals on the transaction exceeds the configured limit. - ExcessiveApprovals { - /// The chainspec limit for max_associated_keys. - max_associated_keys: u32, - /// Number of approvals on the transaction. - got: u32, - }, - - /// The payment amount associated with the transaction exceeds the block gas limit. - ExceedsBlockGasLimit { - /// Configured block gas limit. - block_gas_limit: u64, - /// The payment amount received. - got: u64, - }, - - /// Missing a required runtime arg. - MissingArg { - /// The name of the missing arg. - arg_name: String, - }, - - /// Given runtime arg is not expected type. 
- UnexpectedArgType { - /// The name of the invalid arg. - arg_name: String, - /// The expected type for the given runtime arg. - expected: CLType, - /// The provided type of the given runtime arg. - got: CLType, - }, - - /// Insufficient transfer amount. - InsufficientTransferAmount { - /// The minimum transfer amount. - minimum: u64, - /// The attempted transfer amount. - attempted: U512, - }, - - /// The entry point for this transaction target cannot not be `TransactionEntryPoint::Custom`. - EntryPointCannotBeCustom { - /// The invalid entry point. - entry_point: TransactionEntryPoint, - }, - - /// The entry point for this transaction target must be `TransactionEntryPoint::Custom`. - EntryPointMustBeCustom { - /// The invalid entry point. - entry_point: TransactionEntryPoint, - }, - - /// The transaction has empty module bytes. - EmptyModuleBytes, -} - -impl Display for TransactionV1ConfigFailure { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionV1ConfigFailure::InvalidChainName { expected, got } => { - write!( - formatter, - "invalid chain name: expected {expected}, got {got}" - ) - } - TransactionV1ConfigFailure::ExcessiveSize(error) => { - write!(formatter, "transaction size too large: {error}") - } - TransactionV1ConfigFailure::ExcessiveTimeToLive { max_ttl, got } => { - write!( - formatter, - "time-to-live of {got} exceeds limit of {max_ttl}" - ) - } - TransactionV1ConfigFailure::TimestampInFuture { - validation_timestamp, - timestamp_leeway, - got, - } => { - write!( - formatter, - "timestamp of {got} is later than node's validation timestamp of \ - {validation_timestamp} plus leeway of {timestamp_leeway}" - ) - } - TransactionV1ConfigFailure::InvalidBodyHash => { - write!( - formatter, - "the provided hash does not match the actual hash of the transaction body" - ) - } - TransactionV1ConfigFailure::InvalidTransactionHash => { - write!( - formatter, - "the provided hash does not match the actual hash of the transaction" - ) - } - TransactionV1ConfigFailure::EmptyApprovals => { - write!(formatter, "the transaction has no approvals") - } - TransactionV1ConfigFailure::InvalidApproval { index, error } => { - write!( - formatter, - "the transaction approval at index {index} is invalid: {error}" - ) - } - TransactionV1ConfigFailure::ExcessiveArgsLength { max_length, got } => { - write!( - formatter, - "serialized transaction runtime args of {got} bytes exceeds limit of \ - {max_length} bytes" - ) - } - TransactionV1ConfigFailure::ExcessiveApprovals { - max_associated_keys, - got, - } => { - write!( - formatter, - "number of transaction approvals {got} exceeds the maximum number of \ - associated keys {max_associated_keys}", - ) - } - TransactionV1ConfigFailure::ExceedsBlockGasLimit { - block_gas_limit, - got, - } => { - write!( - formatter, - "payment amount of {got} exceeds the block gas limit of {block_gas_limit}" - ) - } - TransactionV1ConfigFailure::MissingArg { arg_name } => { - write!(formatter, "missing required runtime argument '{arg_name}'") - } - TransactionV1ConfigFailure::UnexpectedArgType { - arg_name, - expected, - got, - } => { - write!( - formatter, - "expected type of '{arg_name}' runtime argument to be {expected}, but got {got}" - ) - } - TransactionV1ConfigFailure::InsufficientTransferAmount { minimum, attempted } => { - write!( - formatter, - "insufficient transfer amount; minimum: {minimum} attempted: {attempted}" - ) - } - TransactionV1ConfigFailure::EntryPointCannotBeCustom { entry_point } => { - write!(formatter, "entry 
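
Because the enum is `#[non_exhaustive]`, downstream code matching on these variants always needs a catch-all arm. A sketch of how a caller might map validation failures to rejection reasons (variant names are taken from the enum above; the grouping is illustrative):

use casper_types::TransactionV1ConfigFailure;

fn rejection_reason(failure: &TransactionV1ConfigFailure) -> &'static str {
    match failure {
        TransactionV1ConfigFailure::InvalidChainName { .. } => "wrong network",
        TransactionV1ConfigFailure::ExcessiveTimeToLive { .. }
        | TransactionV1ConfigFailure::TimestampInFuture { .. } => "bad timing",
        TransactionV1ConfigFailure::InvalidBodyHash
        | TransactionV1ConfigFailure::InvalidTransactionHash
        | TransactionV1ConfigFailure::EmptyApprovals
        | TransactionV1ConfigFailure::InvalidApproval { .. } => "bad signature material",
        TransactionV1ConfigFailure::ExcessiveApprovals { .. } => "too many approvals",
        TransactionV1ConfigFailure::ExceedsBlockGasLimit { .. } => "payment too large",
        // New variants may be added, so a wildcard arm is mandatory.
        _ => "other validation failure",
    }
}
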
point cannot be custom: {entry_point}") - } - TransactionV1ConfigFailure::EntryPointMustBeCustom { entry_point } => { - write!(formatter, "entry point must be custom: {entry_point}") - } - TransactionV1ConfigFailure::EmptyModuleBytes => { - write!(formatter, "the transaction has empty module bytes") - } - } - } -} - -impl From for TransactionV1ConfigFailure { - fn from(error: ExcessiveSizeErrorV1) -> Self { - TransactionV1ConfigFailure::ExcessiveSize(error) - } -} - -#[cfg(feature = "std")] -impl StdError for TransactionV1ConfigFailure { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - TransactionV1ConfigFailure::InvalidApproval { error, .. } => Some(error), - TransactionV1ConfigFailure::InvalidChainName { .. } - | TransactionV1ConfigFailure::ExcessiveSize(_) - | TransactionV1ConfigFailure::ExcessiveTimeToLive { .. } - | TransactionV1ConfigFailure::TimestampInFuture { .. } - | TransactionV1ConfigFailure::InvalidBodyHash - | TransactionV1ConfigFailure::InvalidTransactionHash - | TransactionV1ConfigFailure::EmptyApprovals - | TransactionV1ConfigFailure::ExcessiveArgsLength { .. } - | TransactionV1ConfigFailure::ExcessiveApprovals { .. } - | TransactionV1ConfigFailure::ExceedsBlockGasLimit { .. } - | TransactionV1ConfigFailure::MissingArg { .. } - | TransactionV1ConfigFailure::UnexpectedArgType { .. } - | TransactionV1ConfigFailure::InsufficientTransferAmount { .. } - | TransactionV1ConfigFailure::EntryPointCannotBeCustom { .. } - | TransactionV1ConfigFailure::EntryPointMustBeCustom { .. } - | TransactionV1ConfigFailure::EmptyModuleBytes => None, - } - } -} - -/// Error returned when a transaction is too large. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Serialize)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct ExcessiveSizeErrorV1 { - /// The maximum permitted serialized transaction size, in bytes. - pub max_transaction_size: u32, - /// The serialized size of the transaction provided, in bytes. - pub actual_transaction_size: usize, -} - -impl Display for ExcessiveSizeErrorV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "transaction size of {} bytes exceeds limit of {}", - self.actual_transaction_size, self.max_transaction_size - ) - } -} - -#[cfg(feature = "std")] -impl StdError for ExcessiveSizeErrorV1 {} - -/// Errors other than validation failures relating to Transactions. -#[derive(Debug)] -#[non_exhaustive] -pub enum ErrorV1 { - /// Error while encoding to JSON. - EncodeToJson(serde_json::Error), - - /// Error while decoding from JSON. - DecodeFromJson(DecodeFromJsonErrorV1), -} - -impl From for ErrorV1 { - fn from(error: serde_json::Error) -> Self { - ErrorV1::EncodeToJson(error) - } -} - -impl From for ErrorV1 { - fn from(error: DecodeFromJsonErrorV1) -> Self { - ErrorV1::DecodeFromJson(error) - } -} - -impl Display for ErrorV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - ErrorV1::EncodeToJson(error) => { - write!(formatter, "encoding to json: {}", error) - } - ErrorV1::DecodeFromJson(error) => { - write!(formatter, "decoding from json: {}", error) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for ErrorV1 { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - ErrorV1::EncodeToJson(error) => Some(error), - ErrorV1::DecodeFromJson(error) => Some(error), - } - } -} - -/// Error while decoding a `TransactionV1` from JSON. 
-#[derive(Debug)] -#[non_exhaustive] -pub enum DecodeFromJsonErrorV1 { - /// Failed to decode from base 16. - FromHex(base16::DecodeError), - - /// Failed to convert slice to array. - TryFromSlice(TryFromSliceError), -} - -impl From for DecodeFromJsonErrorV1 { - fn from(error: base16::DecodeError) -> Self { - DecodeFromJsonErrorV1::FromHex(error) - } -} - -impl From for DecodeFromJsonErrorV1 { - fn from(error: TryFromSliceError) -> Self { - DecodeFromJsonErrorV1::TryFromSlice(error) - } -} - -impl Display for DecodeFromJsonErrorV1 { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - DecodeFromJsonErrorV1::FromHex(error) => { - write!(formatter, "{}", error) - } - DecodeFromJsonErrorV1::TryFromSlice(error) => { - write!(formatter, "{}", error) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for DecodeFromJsonErrorV1 { - fn source(&self) -> Option<&(dyn StdError + 'static)> { - match self { - DecodeFromJsonErrorV1::FromHex(error) => Some(error), - DecodeFromJsonErrorV1::TryFromSlice(error) => Some(error), - } - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/finalized_transaction_v1_approvals.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/finalized_transaction_v1_approvals.rs deleted file mode 100644 index a10c4ed2..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/finalized_transaction_v1_approvals.rs +++ /dev/null @@ -1,78 +0,0 @@ -use alloc::{collections::BTreeSet, vec::Vec}; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - TransactionV1Approval, -}; - -/// A set of approvals that has been agreed upon by consensus to approve of a specific -/// `TransactionV1`. -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct FinalizedTransactionV1Approvals(BTreeSet); - -impl FinalizedTransactionV1Approvals { - /// Creates a new set of finalized transaction approvals. - pub fn new(approvals: BTreeSet) -> Self { - Self(approvals) - } - - /// Returns the inner `BTreeSet` of approvals. - pub fn inner(&self) -> &BTreeSet { - &self.0 - } - - /// Converts this set of finalized approvals into the inner `BTreeSet`. - pub fn into_inner(self) -> BTreeSet { - self.0 - } - - /// Returns a random FinalizedTransactionV1Approvals. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let count = rng.gen_range(1..10); - let approvals = (0..count) - .map(|_| TransactionV1Approval::random(rng)) - .collect(); - FinalizedTransactionV1Approvals(approvals) - } -} -impl ToBytes for FinalizedTransactionV1Approvals { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for FinalizedTransactionV1Approvals { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (approvals, remainder) = BTreeSet::::from_bytes(bytes)?; - Ok((FinalizedTransactionV1Approvals(approvals), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let approvals = FinalizedTransactionV1Approvals::random(rng); - bytesrepr::test_serialization_roundtrip(&approvals); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approval.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approval.rs deleted file mode 100644 index 0d6cb087..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approval.rs +++ /dev/null @@ -1,102 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use super::TransactionV1Hash; -#[cfg(any(all(feature = "std", feature = "testing"), test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - crypto, PublicKey, SecretKey, Signature, -}; - -/// A struct containing a signature of a transaction hash and the public key of the signer. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct TransactionV1Approval { - signer: PublicKey, - signature: Signature, -} - -impl TransactionV1Approval { - /// Creates an approval by signing the given transaction hash using the given secret key. - pub fn create(hash: &TransactionV1Hash, secret_key: &SecretKey) -> Self { - let signer = PublicKey::from(secret_key); - let signature = crypto::sign(hash, secret_key, &signer); - Self { signer, signature } - } - - /// Returns a new approval. - pub fn new(signer: PublicKey, signature: Signature) -> Self { - Self { signer, signature } - } - - /// Returns the public key of the approval's signer. - pub fn signer(&self) -> &PublicKey { - &self.signer - } - - /// Returns the approval signature. - pub fn signature(&self) -> &Signature { - &self.signature - } - - /// Returns a random `TransactionV1Approval`. 
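
`FinalizedTransactionV1Approvals` is the approval set consensus settled on, and `TransactionV1ApprovalsHash::compute` produces the digest that commits to exactly that set. A sketch of computing the hash before wrapping the set (the `testing` feature and crate-root paths are assumptions):

use std::collections::BTreeSet;

use casper_types::{
    testing::TestRng, FinalizedTransactionV1Approvals, TransactionV1Approval,
    TransactionV1ApprovalsHash,
};

fn finalize_approvals() {
    let rng = &mut TestRng::new();
    let approvals: BTreeSet<TransactionV1Approval> =
        (0..3).map(|_| TransactionV1Approval::random(rng)).collect();

    // Hash first, then move the set into the finalized wrapper.
    let approvals_hash =
        TransactionV1ApprovalsHash::compute(&approvals).expect("approvals should serialize");
    let finalized = FinalizedTransactionV1Approvals::new(approvals);

    assert_eq!(finalized.inner().len(), 3);
    println!("approvals hash: {}", approvals_hash);
}
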
- #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = TransactionV1Hash::random(rng); - let secret_key = SecretKey::random(rng); - TransactionV1Approval::create(&hash, &secret_key) - } -} - -impl Display for TransactionV1Approval { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "approval({})", self.signer) - } -} - -impl ToBytes for TransactionV1Approval { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.signer.write_bytes(writer)?; - self.signature.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.signer.serialized_length() + self.signature.serialized_length() - } -} - -impl FromBytes for TransactionV1Approval { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (signer, remainder) = PublicKey::from_bytes(bytes)?; - let (signature, remainder) = Signature::from_bytes(remainder)?; - let approval = TransactionV1Approval { signer, signature }; - Ok((approval, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let approval = TransactionV1Approval::random(rng); - bytesrepr::test_serialization_roundtrip(&approval); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approvals_hash.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approvals_hash.rs deleted file mode 100644 index cf148819..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_approvals_hash.rs +++ /dev/null @@ -1,114 +0,0 @@ -use alloc::{collections::BTreeSet, vec::Vec}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::TransactionV1; -use super::TransactionV1Approval; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, -}; - -/// The cryptographic hash of the bytesrepr-encoded set of approvals for a single [`TransactionV1`]. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[serde(deny_unknown_fields)] -pub struct TransactionV1ApprovalsHash(Digest); - -impl TransactionV1ApprovalsHash { - /// The number of bytes in a `TransactionV1ApprovalsHash` digest. - pub const LENGTH: usize = Digest::LENGTH; - - /// Constructs a new `TransactionV1ApprovalsHash` by bytesrepr-encoding `approvals` and creating - /// a [`Digest`] of this. - pub fn compute(approvals: &BTreeSet) -> Result { - let digest = Digest::hash(approvals.to_bytes()?); - Ok(TransactionV1ApprovalsHash(digest)) - } - - /// Returns the wrapped inner digest. - pub fn inner(&self) -> &Digest { - &self.0 - } - - /// Returns a new `TransactionV1ApprovalsHash` directly initialized with the provided bytes; no - /// hashing is done. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - TransactionV1ApprovalsHash(Digest::from_raw(raw_digest)) - } - - /// Returns a random `TransactionV1ApprovalsHash`. 
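
An approval is nothing more than the signer's public key plus a signature over the transaction hash, so it can be checked directly with the crate's crypto helpers, exactly as the per-approval check in `verify` does. A sketch (the `testing` feature and crate-root paths are assumptions):

use casper_types::{
    crypto, testing::TestRng, PublicKey, SecretKey, TransactionV1Approval, TransactionV1Hash,
};

fn approval_roundtrip() {
    let rng = &mut TestRng::new();
    let hash = TransactionV1Hash::random(rng);
    let secret_key = SecretKey::random(rng);

    let approval = TransactionV1Approval::create(&hash, &secret_key);
    assert_eq!(approval.signer(), &PublicKey::from(&secret_key));

    // The same check `TransactionV1::verify` runs for each approval.
    crypto::verify(hash, approval.signature(), approval.signer())
        .expect("signature should verify against the hash and signer");
}
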
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = rng.gen::<[u8; Digest::LENGTH]>().into(); - TransactionV1ApprovalsHash(hash) - } -} - -impl From for Digest { - fn from(hash: TransactionV1ApprovalsHash) -> Self { - hash.0 - } -} - -impl From for TransactionV1ApprovalsHash { - fn from(digest: Digest) -> Self { - Self(digest) - } -} - -impl Display for TransactionV1ApprovalsHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "transaction-v1-approvals-hash({})", self.0,) - } -} - -impl AsRef<[u8]> for TransactionV1ApprovalsHash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for TransactionV1ApprovalsHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for TransactionV1ApprovalsHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Digest::from_bytes(bytes) - .map(|(inner, remainder)| (TransactionV1ApprovalsHash(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let hash = TransactionV1ApprovalsHash::random(rng); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body.rs deleted file mode 100644 index edc515df..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body.rs +++ /dev/null @@ -1,426 +0,0 @@ -#[cfg(any(feature = "std", test))] -pub(super) mod arg_handling; - -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::{Rng, RngCore}; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; -#[cfg(any(feature = "std", test))] -use tracing::debug; - -use super::super::{RuntimeArgs, TransactionEntryPoint, TransactionScheduling, TransactionTarget}; -#[cfg(doc)] -use super::TransactionV1; -#[cfg(any(feature = "std", test))] -use super::{TransactionConfig, TransactionV1ConfigFailure}; -use crate::bytesrepr::{self, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::{ - bytesrepr::Bytes, testing::TestRng, PublicKey, TransactionInvocationTarget, TransactionRuntime, - TransactionSessionKind, -}; - -/// The body of a [`TransactionV1`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Body of a `TransactionV1`.") -)] -pub struct TransactionV1Body { - pub(super) args: RuntimeArgs, - pub(super) target: TransactionTarget, - pub(super) entry_point: TransactionEntryPoint, - pub(super) scheduling: TransactionScheduling, -} - -impl TransactionV1Body { - /// Returns a new `TransactionV1Body`. 
- pub fn new( - args: RuntimeArgs, - target: TransactionTarget, - entry_point: TransactionEntryPoint, - scheduling: TransactionScheduling, - ) -> Self { - TransactionV1Body { - args, - target, - entry_point, - scheduling, - } - } - - /// Returns the runtime args of the transaction. - pub fn args(&self) -> &RuntimeArgs { - &self.args - } - - /// Returns the target of the transaction. - pub fn target(&self) -> &TransactionTarget { - &self.target - } - - /// Returns the entry point of the transaction. - pub fn entry_point(&self) -> &TransactionEntryPoint { - &self.entry_point - } - - /// Returns the scheduling kind of the transaction. - pub fn scheduling(&self) -> &TransactionScheduling { - &self.scheduling - } - - #[cfg(any(feature = "std", test))] - pub(super) fn is_valid( - &self, - config: &TransactionConfig, - ) -> Result<(), TransactionV1ConfigFailure> { - let args_length = self.args.serialized_length(); - if args_length > config.transaction_v1_config.max_args_length as usize { - debug!( - args_length, - max_args_length = config.transaction_v1_config.max_args_length, - "transaction runtime args excessive size" - ); - return Err(TransactionV1ConfigFailure::ExcessiveArgsLength { - max_length: config.transaction_v1_config.max_args_length as usize, - got: args_length, - }); - } - - match &self.target { - TransactionTarget::Native => match self.entry_point { - TransactionEntryPoint::Custom(_) => { - debug!( - entry_point = %self.entry_point, - "native transaction cannot have custom entry point" - ); - Err(TransactionV1ConfigFailure::EntryPointCannotBeCustom { - entry_point: self.entry_point.clone(), - }) - } - TransactionEntryPoint::Transfer => arg_handling::has_valid_transfer_args( - &self.args, - config.native_transfer_minimum_motes, - ), - TransactionEntryPoint::AddBid => arg_handling::has_valid_add_bid_args(&self.args), - TransactionEntryPoint::WithdrawBid => { - arg_handling::has_valid_withdraw_bid_args(&self.args) - } - TransactionEntryPoint::Delegate => { - arg_handling::has_valid_delegate_args(&self.args) - } - TransactionEntryPoint::Undelegate => { - arg_handling::has_valid_undelegate_args(&self.args) - } - TransactionEntryPoint::Redelegate => { - arg_handling::has_valid_redelegate_args(&self.args) - } - }, - TransactionTarget::Stored { .. } => match &self.entry_point { - TransactionEntryPoint::Custom(_) => Ok(()), - TransactionEntryPoint::Transfer - | TransactionEntryPoint::AddBid - | TransactionEntryPoint::WithdrawBid - | TransactionEntryPoint::Delegate - | TransactionEntryPoint::Undelegate - | TransactionEntryPoint::Redelegate => { - debug!( - entry_point = %self.entry_point, - "transaction targeting stored entity/package must have custom entry point" - ); - Err(TransactionV1ConfigFailure::EntryPointMustBeCustom { - entry_point: self.entry_point.clone(), - }) - } - }, - TransactionTarget::Session { module_bytes, .. 
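
The validation rules above tie targets to entry points: a `Native` target may only use the built-in entry points, and each native entry point has a fixed argument shape (checked by `arg_handling` further down). A sketch of a native-transfer body; the argument names come from the `arg_handling` constants, the amount is only assumed to clear the configured minimum, and the `testing` feature, a matching `rand` dependency and crate-root paths are assumptions:

use rand::Rng;

use casper_types::{
    runtime_args, testing::TestRng, TransactionEntryPoint, TransactionScheduling,
    TransactionTarget, TransactionV1Body, URef, U512,
};

fn native_transfer_body(rng: &mut TestRng) -> TransactionV1Body {
    // "source", "target" and "amount" are the required transfer args.
    let args = runtime_args! {
        "source" => rng.gen::<URef>(),
        "target" => rng.gen::<URef>(),
        "amount" => U512::from(2_500_000_000_u64)
    };
    TransactionV1Body::new(
        args,
        TransactionTarget::Native,
        TransactionEntryPoint::Transfer,
        // Any scheduling variant works for this sketch.
        TransactionScheduling::random(rng),
    )
}
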
} => match &self.entry_point { - TransactionEntryPoint::Custom(_) => { - if module_bytes.is_empty() { - debug!("transaction with session code must not have empty module bytes"); - return Err(TransactionV1ConfigFailure::EmptyModuleBytes); - } - Ok(()) - } - TransactionEntryPoint::Transfer - | TransactionEntryPoint::AddBid - | TransactionEntryPoint::WithdrawBid - | TransactionEntryPoint::Delegate - | TransactionEntryPoint::Undelegate - | TransactionEntryPoint::Redelegate => { - debug!( - entry_point = %self.entry_point, - "transaction with session code must have custom entry point" - ); - Err(TransactionV1ConfigFailure::EntryPointMustBeCustom { - entry_point: self.entry_point.clone(), - }) - } - }, - } - } - - /// Returns a random `TransactionV1Body`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - match rng.gen_range(0..8) { - 0 => { - let source = rng.gen(); - let target = rng.gen(); - let amount = rng.gen_range( - TransactionConfig::default().native_transfer_minimum_motes..=u64::MAX, - ); - let maybe_to = rng.gen::().then(|| rng.gen()); - let maybe_id = rng.gen::().then(|| rng.gen()); - let args = - arg_handling::new_transfer_args(source, target, amount, maybe_to, maybe_id) - .unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Transfer, - TransactionScheduling::random(rng), - ) - } - 1 => { - let public_key = PublicKey::random(rng); - let delegation_rate = rng.gen(); - let amount = rng.gen::(); - let args = - arg_handling::new_add_bid_args(public_key, delegation_rate, amount).unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::AddBid, - TransactionScheduling::random(rng), - ) - } - 2 => { - let public_key = PublicKey::random(rng); - let amount = rng.gen::(); - let args = arg_handling::new_withdraw_bid_args(public_key, amount).unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::WithdrawBid, - TransactionScheduling::random(rng), - ) - } - 3 => { - let delegator = PublicKey::random(rng); - let validator = PublicKey::random(rng); - let amount = rng.gen::(); - let args = arg_handling::new_delegate_args(delegator, validator, amount).unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Delegate, - TransactionScheduling::random(rng), - ) - } - 4 => { - let delegator = PublicKey::random(rng); - let validator = PublicKey::random(rng); - let amount = rng.gen::(); - let args = arg_handling::new_undelegate_args(delegator, validator, amount).unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Undelegate, - TransactionScheduling::random(rng), - ) - } - 5 => { - let delegator = PublicKey::random(rng); - let validator = PublicKey::random(rng); - let amount = rng.gen::(); - let new_validator = PublicKey::random(rng); - let args = - arg_handling::new_redelegate_args(delegator, validator, amount, new_validator) - .unwrap(); - TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Redelegate, - TransactionScheduling::random(rng), - ) - } - 6 => { - let target = TransactionTarget::Stored { - id: TransactionInvocationTarget::random(rng), - runtime: TransactionRuntime::VmCasperV1, - }; - TransactionV1Body::new( - RuntimeArgs::random(rng), - target, - TransactionEntryPoint::Custom(rng.random_string(1..11)), - TransactionScheduling::random(rng), - ) - } - 7 => { - let mut buffer = vec![0u8; rng.gen_range(0..100)]; - 
rng.fill_bytes(buffer.as_mut()); - let target = TransactionTarget::Session { - kind: TransactionSessionKind::random(rng), - module_bytes: Bytes::from(buffer), - runtime: TransactionRuntime::VmCasperV1, - }; - TransactionV1Body::new( - RuntimeArgs::random(rng), - target, - TransactionEntryPoint::Custom(rng.random_string(1..11)), - TransactionScheduling::random(rng), - ) - } - _ => unreachable!(), - } - } -} - -impl Display for TransactionV1Body { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "v1-body({} {} {})", - self.target, self.entry_point, self.scheduling - ) - } -} - -impl ToBytes for TransactionV1Body { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.args.write_bytes(writer)?; - self.target.write_bytes(writer)?; - self.entry_point.write_bytes(writer)?; - self.scheduling.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.args.serialized_length() - + self.target.serialized_length() - + self.entry_point.serialized_length() - + self.scheduling.serialized_length() - } -} - -impl FromBytes for TransactionV1Body { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (args, remainder) = RuntimeArgs::from_bytes(bytes)?; - let (target, remainder) = TransactionTarget::from_bytes(remainder)?; - let (entry_point, remainder) = TransactionEntryPoint::from_bytes(remainder)?; - let (scheduling, remainder) = TransactionScheduling::from_bytes(remainder)?; - let body = TransactionV1Body::new(args, target, entry_point, scheduling); - Ok((body, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::runtime_args; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let body = TransactionV1Body::random(rng); - bytesrepr::test_serialization_roundtrip(&body); - } - - #[test] - fn not_acceptable_due_to_excessive_args_length() { - let rng = &mut TestRng::new(); - let mut config = TransactionConfig::default(); - config.transaction_v1_config.max_args_length = 10; - let mut body = TransactionV1Body::random(rng); - body.args = runtime_args! 
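
Conversely, stored and session targets must use a `Custom` entry point, and session module bytes must be non-empty or validation fails with `EmptyModuleBytes`. A sketch of a session body (feature flags and crate-root paths assumed as before):

use casper_types::{
    bytesrepr::Bytes, testing::TestRng, RuntimeArgs, TransactionEntryPoint, TransactionRuntime,
    TransactionScheduling, TransactionSessionKind, TransactionTarget, TransactionV1Body,
};

fn session_body(rng: &mut TestRng, wasm: Vec<u8>) -> TransactionV1Body {
    let target = TransactionTarget::new_session(
        TransactionSessionKind::Standard,
        Bytes::from(wasm), // must be non-empty to pass `is_valid`
        TransactionRuntime::VmCasperV1,
    );
    TransactionV1Body::new(
        RuntimeArgs::new(),
        target,
        TransactionEntryPoint::Custom("call".to_string()),
        TransactionScheduling::random(rng),
    )
}
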
{"a" => 1_u8}; - - let expected_error = TransactionV1ConfigFailure::ExcessiveArgsLength { - max_length: 10, - got: 15, - }; - - assert_eq!(body.is_valid(&config,), Err(expected_error)); - } - - #[test] - fn not_acceptable_due_to_custom_entry_point_in_native() { - let rng = &mut TestRng::new(); - let public_key = PublicKey::random(rng); - let amount = rng.gen::(); - let args = arg_handling::new_withdraw_bid_args(public_key, amount).unwrap(); - let entry_point = TransactionEntryPoint::Custom("call".to_string()); - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - entry_point.clone(), - TransactionScheduling::random(rng), - ); - - let expected_error = TransactionV1ConfigFailure::EntryPointCannotBeCustom { entry_point }; - - let config = TransactionConfig::default(); - assert_eq!(body.is_valid(&config,), Err(expected_error)); - } - - #[test] - fn not_acceptable_due_to_non_custom_entry_point_in_stored_or_session() { - let rng = &mut TestRng::new(); - let config = TransactionConfig::default(); - - let mut check = |entry_point: TransactionEntryPoint| { - let stored_target = TransactionTarget::new_stored( - TransactionInvocationTarget::InvocableEntity([0; 32]), - TransactionRuntime::VmCasperV1, - ); - let session_target = TransactionTarget::new_session( - TransactionSessionKind::Standard, - Bytes::from(vec![1]), - TransactionRuntime::VmCasperV1, - ); - - let stored_body = TransactionV1Body::new( - RuntimeArgs::new(), - stored_target, - entry_point.clone(), - TransactionScheduling::random(rng), - ); - let session_body = TransactionV1Body::new( - RuntimeArgs::new(), - session_target, - entry_point.clone(), - TransactionScheduling::random(rng), - ); - - let expected_error = TransactionV1ConfigFailure::EntryPointMustBeCustom { entry_point }; - - assert_eq!(stored_body.is_valid(&config,), Err(expected_error.clone())); - assert_eq!(session_body.is_valid(&config,), Err(expected_error)); - }; - - check(TransactionEntryPoint::Transfer); - check(TransactionEntryPoint::AddBid); - check(TransactionEntryPoint::WithdrawBid); - check(TransactionEntryPoint::Delegate); - check(TransactionEntryPoint::Undelegate); - check(TransactionEntryPoint::Redelegate); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body/arg_handling.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body/arg_handling.rs deleted file mode 100644 index bc0ac80a..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_body/arg_handling.rs +++ /dev/null @@ -1,783 +0,0 @@ -use core::marker::PhantomData; - -use tracing::debug; - -use super::super::TransactionV1ConfigFailure; -use crate::{ - account::AccountHash, - bytesrepr::{FromBytes, ToBytes}, - CLTyped, CLValue, CLValueError, PublicKey, RuntimeArgs, URef, U512, -}; - -const TRANSFER_ARG_SOURCE: RequiredArg = RequiredArg::new("source"); -const TRANSFER_ARG_TARGET: RequiredArg = RequiredArg::new("target"); -const TRANSFER_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); -const TRANSFER_ARG_TO: OptionalArg = OptionalArg::new("to"); -const TRANSFER_ARG_ID: OptionalArg = OptionalArg::new("id"); - -const ADD_BID_ARG_PUBLIC_KEY: RequiredArg = RequiredArg::new("public_key"); -const ADD_BID_ARG_DELEGATION_RATE: RequiredArg = RequiredArg::new("delegation_rate"); -const ADD_BID_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); - -const WITHDRAW_BID_ARG_PUBLIC_KEY: RequiredArg = RequiredArg::new("public_key"); -const WITHDRAW_BID_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); - 
-const DELEGATE_ARG_DELEGATOR: RequiredArg = RequiredArg::new("delegator"); -const DELEGATE_ARG_VALIDATOR: RequiredArg = RequiredArg::new("validator"); -const DELEGATE_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); - -const UNDELEGATE_ARG_DELEGATOR: RequiredArg = RequiredArg::new("delegator"); -const UNDELEGATE_ARG_VALIDATOR: RequiredArg = RequiredArg::new("validator"); -const UNDELEGATE_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); - -const REDELEGATE_ARG_DELEGATOR: RequiredArg = RequiredArg::new("delegator"); -const REDELEGATE_ARG_VALIDATOR: RequiredArg = RequiredArg::new("validator"); -const REDELEGATE_ARG_AMOUNT: RequiredArg = RequiredArg::new("amount"); -const REDELEGATE_ARG_NEW_VALIDATOR: RequiredArg = RequiredArg::new("new_validator"); - -struct RequiredArg { - name: &'static str, - _phantom: PhantomData, -} - -impl RequiredArg { - const fn new(name: &'static str) -> Self { - Self { - name, - _phantom: PhantomData, - } - } - - fn get(&self, args: &RuntimeArgs) -> Result - where - T: CLTyped + FromBytes, - { - let cl_value = args.get(self.name).ok_or_else(|| { - debug!("missing required runtime argument '{}'", self.name); - TransactionV1ConfigFailure::MissingArg { - arg_name: self.name.to_string(), - } - })?; - parse_cl_value(cl_value, self.name) - } - - fn insert(&self, args: &mut RuntimeArgs, value: T) -> Result<(), CLValueError> - where - T: CLTyped + ToBytes, - { - args.insert(self.name, value) - } -} - -struct OptionalArg { - name: &'static str, - _phantom: PhantomData, -} - -impl OptionalArg { - const fn new(name: &'static str) -> Self { - Self { - name, - _phantom: PhantomData, - } - } - - fn get(&self, args: &RuntimeArgs) -> Result, TransactionV1ConfigFailure> - where - T: CLTyped + FromBytes, - { - let cl_value = match args.get(self.name) { - Some(value) => value, - None => return Ok(None), - }; - let value = parse_cl_value(cl_value, self.name)?; - Ok(value) - } - - fn insert(&self, args: &mut RuntimeArgs, value: T) -> Result<(), CLValueError> - where - T: CLTyped + ToBytes, - { - args.insert(self.name, Some(value)) - } -} - -fn parse_cl_value( - cl_value: &CLValue, - arg_name: &str, -) -> Result { - cl_value.to_t::().map_err(|_| { - debug!( - "expected runtime argument '{arg_name}' to be of type {}, but is {}", - T::cl_type(), - cl_value.cl_type() - ); - TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: arg_name.to_string(), - expected: T::cl_type(), - got: cl_value.cl_type().clone(), - } - }) -} - -/// Creates a `RuntimeArgs` suitable for use in a transfer transaction. -pub(in crate::transaction::transaction_v1) fn new_transfer_args>( - source: URef, - target: URef, - amount: A, - maybe_to: Option, - maybe_id: Option, -) -> Result { - let mut args = RuntimeArgs::new(); - TRANSFER_ARG_SOURCE.insert(&mut args, source)?; - TRANSFER_ARG_TARGET.insert(&mut args, target)?; - TRANSFER_ARG_AMOUNT.insert(&mut args, amount.into())?; - if let Some(to) = maybe_to { - TRANSFER_ARG_TO.insert(&mut args, to)?; - } - if let Some(id) = maybe_id { - TRANSFER_ARG_ID.insert(&mut args, id)?; - } - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in a transfer transaction. 
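
`RequiredArg` and `OptionalArg` are thin wrappers over the public API: look the name up in `RuntimeArgs` and convert the `CLValue` to the expected type, mapping failures to `MissingArg`/`UnexpectedArgType`. The same lookup written directly against the public types (crate-root paths assumed):

use casper_types::{runtime_args, RuntimeArgs, U512};

fn read_amount(args: &RuntimeArgs) -> Option<U512> {
    // `get` finds the named arg; `to_t` does the typed conversion that
    // `parse_cl_value` wraps with a debug log and a structured error.
    args.get("amount").and_then(|cl_value| cl_value.to_t::<U512>().ok())
}

fn demo() {
    let args = runtime_args! { "amount" => U512::from(100_u64) };
    assert_eq!(read_amount(&args), Some(U512::from(100_u64)));
}
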
-pub(in crate::transaction::transaction_v1) fn has_valid_transfer_args( - args: &RuntimeArgs, - native_transfer_minimum_motes: u64, -) -> Result<(), TransactionV1ConfigFailure> { - let _source = TRANSFER_ARG_SOURCE.get(args)?; - let _target = TRANSFER_ARG_TARGET.get(args)?; - let amount = TRANSFER_ARG_AMOUNT.get(args)?; - if amount < U512::from(native_transfer_minimum_motes) { - debug!( - minimum = %native_transfer_minimum_motes, - %amount, - "insufficient transfer amount" - ); - return Err(TransactionV1ConfigFailure::InsufficientTransferAmount { - minimum: native_transfer_minimum_motes, - attempted: amount, - }); - } - let _maybe_to = TRANSFER_ARG_TO.get(args)?; - let _maybe_id = TRANSFER_ARG_ID.get(args)?; - Ok(()) -} - -/// Creates a `RuntimeArgs` suitable for use in an add_bid transaction. -pub(in crate::transaction::transaction_v1) fn new_add_bid_args>( - public_key: PublicKey, - delegation_rate: u8, - amount: A, -) -> Result { - let mut args = RuntimeArgs::new(); - ADD_BID_ARG_PUBLIC_KEY.insert(&mut args, public_key)?; - ADD_BID_ARG_DELEGATION_RATE.insert(&mut args, delegation_rate)?; - ADD_BID_ARG_AMOUNT.insert(&mut args, amount.into())?; - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in an add_bid transaction. -pub(in crate::transaction::transaction_v1) fn has_valid_add_bid_args( - args: &RuntimeArgs, -) -> Result<(), TransactionV1ConfigFailure> { - let _public_key = ADD_BID_ARG_PUBLIC_KEY.get(args)?; - let _delegation_rate = ADD_BID_ARG_DELEGATION_RATE.get(args)?; - let _amount = ADD_BID_ARG_AMOUNT.get(args)?; - Ok(()) -} - -/// Creates a `RuntimeArgs` suitable for use in a withdraw_bid transaction. -pub(in crate::transaction::transaction_v1) fn new_withdraw_bid_args>( - public_key: PublicKey, - amount: A, -) -> Result { - let mut args = RuntimeArgs::new(); - WITHDRAW_BID_ARG_PUBLIC_KEY.insert(&mut args, public_key)?; - WITHDRAW_BID_ARG_AMOUNT.insert(&mut args, amount.into())?; - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in an withdraw_bid transaction. -pub(in crate::transaction::transaction_v1) fn has_valid_withdraw_bid_args( - args: &RuntimeArgs, -) -> Result<(), TransactionV1ConfigFailure> { - let _public_key = WITHDRAW_BID_ARG_PUBLIC_KEY.get(args)?; - let _amount = WITHDRAW_BID_ARG_AMOUNT.get(args)?; - Ok(()) -} - -/// Creates a `RuntimeArgs` suitable for use in a delegate transaction. -pub(in crate::transaction::transaction_v1) fn new_delegate_args>( - delegator: PublicKey, - validator: PublicKey, - amount: A, -) -> Result { - let mut args = RuntimeArgs::new(); - DELEGATE_ARG_DELEGATOR.insert(&mut args, delegator)?; - DELEGATE_ARG_VALIDATOR.insert(&mut args, validator)?; - DELEGATE_ARG_AMOUNT.insert(&mut args, amount.into())?; - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in a delegate transaction. -pub(in crate::transaction::transaction_v1) fn has_valid_delegate_args( - args: &RuntimeArgs, -) -> Result<(), TransactionV1ConfigFailure> { - let _delegator = DELEGATE_ARG_DELEGATOR.get(args)?; - let _validator = DELEGATE_ARG_VALIDATOR.get(args)?; - let _amount = DELEGATE_ARG_AMOUNT.get(args)?; - Ok(()) -} - -/// Creates a `RuntimeArgs` suitable for use in an undelegate transaction. 
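
The auction entry points all follow the same pattern, with argument names pinned by the constants at the top of this module ("delegator", "validator", "amount", plus "new_validator" for redelegation). Building delegate args by hand with the public macro (the `testing` feature for `PublicKey::random` and crate-root paths are assumptions):

use casper_types::{runtime_args, testing::TestRng, PublicKey, RuntimeArgs, U512};

fn delegate_args(rng: &mut TestRng) -> RuntimeArgs {
    runtime_args! {
        "delegator" => PublicKey::random(rng),
        "validator" => PublicKey::random(rng),
        "amount" => U512::from(500_000_000_000_u64)
    }
}
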
-pub(in crate::transaction::transaction_v1) fn new_undelegate_args>( - delegator: PublicKey, - validator: PublicKey, - amount: A, -) -> Result { - let mut args = RuntimeArgs::new(); - UNDELEGATE_ARG_DELEGATOR.insert(&mut args, delegator)?; - UNDELEGATE_ARG_VALIDATOR.insert(&mut args, validator)?; - UNDELEGATE_ARG_AMOUNT.insert(&mut args, amount.into())?; - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in an undelegate transaction. -pub(in crate::transaction::transaction_v1) fn has_valid_undelegate_args( - args: &RuntimeArgs, -) -> Result<(), TransactionV1ConfigFailure> { - let _delegator = UNDELEGATE_ARG_DELEGATOR.get(args)?; - let _validator = UNDELEGATE_ARG_VALIDATOR.get(args)?; - let _amount = UNDELEGATE_ARG_AMOUNT.get(args)?; - Ok(()) -} - -/// Creates a `RuntimeArgs` suitable for use in a redelegate transaction. -pub(in crate::transaction::transaction_v1) fn new_redelegate_args>( - delegator: PublicKey, - validator: PublicKey, - amount: A, - new_validator: PublicKey, -) -> Result { - let mut args = RuntimeArgs::new(); - REDELEGATE_ARG_DELEGATOR.insert(&mut args, delegator)?; - REDELEGATE_ARG_VALIDATOR.insert(&mut args, validator)?; - REDELEGATE_ARG_AMOUNT.insert(&mut args, amount.into())?; - REDELEGATE_ARG_NEW_VALIDATOR.insert(&mut args, new_validator)?; - Ok(args) -} - -/// Checks the given `RuntimeArgs` are suitable for use in a redelegate transaction. -pub(in crate::transaction::transaction_v1) fn has_valid_redelegate_args( - args: &RuntimeArgs, -) -> Result<(), TransactionV1ConfigFailure> { - let _delegator = REDELEGATE_ARG_DELEGATOR.get(args)?; - let _validator = REDELEGATE_ARG_VALIDATOR.get(args)?; - let _amount = REDELEGATE_ARG_AMOUNT.get(args)?; - let _new_validator = REDELEGATE_ARG_NEW_VALIDATOR.get(args)?; - Ok(()) -} - -#[cfg(test)] -mod tests { - use rand::Rng; - - use super::*; - use crate::{runtime_args, testing::TestRng, CLType}; - - #[test] - fn should_validate_transfer_args() { - let rng = &mut TestRng::new(); - let min_motes = 10_u64; - // Check random args, within motes limit. - let args = new_transfer_args( - rng.gen(), - rng.gen(), - U512::from(rng.gen_range(min_motes..=u64::MAX)), - rng.gen::().then(|| rng.gen()), - rng.gen::().then(|| rng.gen()), - ) - .unwrap(); - has_valid_transfer_args(&args, min_motes).unwrap(); - - // Check at minimum motes limit. - let args = new_transfer_args( - rng.gen(), - rng.gen(), - U512::from(min_motes), - rng.gen::().then(|| rng.gen()), - rng.gen::().then(|| rng.gen()), - ) - .unwrap(); - has_valid_transfer_args(&args, min_motes).unwrap(); - - // Check with extra arg. - let mut args = new_transfer_args( - rng.gen(), - rng.gen(), - U512::from(min_motes), - rng.gen::().then(|| rng.gen()), - rng.gen::().then(|| rng.gen()), - ) - .unwrap(); - args.insert("a", 1).unwrap(); - has_valid_transfer_args(&args, min_motes).unwrap(); - } - - #[test] - fn transfer_args_with_low_amount_should_be_invalid() { - let rng = &mut TestRng::new(); - let min_motes = 10_u64; - - let args = runtime_args! 
{ - TRANSFER_ARG_SOURCE.name => rng.gen::(), - TRANSFER_ARG_TARGET.name => rng.gen::(), - TRANSFER_ARG_AMOUNT.name => U512::from(min_motes - 1) - }; - - let expected_error = TransactionV1ConfigFailure::InsufficientTransferAmount { - minimum: min_motes, - attempted: U512::from(min_motes - 1), - }; - - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - } - - #[test] - fn transfer_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - let min_motes = 10_u64; - - // Missing "source". - let args = runtime_args! { - TRANSFER_ARG_TARGET.name => rng.gen::(), - TRANSFER_ARG_AMOUNT.name => U512::from(min_motes) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: TRANSFER_ARG_SOURCE.name.to_string(), - }; - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - - // Missing "target". - let args = runtime_args! { - TRANSFER_ARG_SOURCE.name => rng.gen::(), - TRANSFER_ARG_AMOUNT.name => U512::from(min_motes) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: TRANSFER_ARG_TARGET.name.to_string(), - }; - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - - // Missing "amount". - let args = runtime_args! { - TRANSFER_ARG_SOURCE.name => rng.gen::(), - TRANSFER_ARG_TARGET.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: TRANSFER_ARG_AMOUNT.name.to_string(), - }; - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - } - - #[test] - fn transfer_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - let min_motes = 10_u64; - - // Wrong "source" type (a required arg). - let args = runtime_args! { - TRANSFER_ARG_SOURCE.name => 1_u8, - TRANSFER_ARG_TARGET.name => rng.gen::(), - TRANSFER_ARG_AMOUNT.name => U512::from(min_motes) - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: TRANSFER_ARG_SOURCE.name.to_string(), - expected: CLType::URef, - got: CLType::U8, - }; - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - - // Wrong "to" type (an optional arg). - let args = runtime_args! { - TRANSFER_ARG_SOURCE.name => rng.gen::(), - TRANSFER_ARG_TARGET.name => rng.gen::(), - TRANSFER_ARG_AMOUNT.name => U512::from(min_motes), - TRANSFER_ARG_TO.name => 1_u8 - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: TRANSFER_ARG_TO.name.to_string(), - expected: Option::::cl_type(), - got: CLType::U8, - }; - assert_eq!( - has_valid_transfer_args(&args, min_motes), - Err(expected_error) - ); - } - - #[test] - fn should_validate_add_bid_args() { - let rng = &mut TestRng::new(); - - // Check random args. - let mut args = - new_add_bid_args(PublicKey::random(rng), rng.gen(), rng.gen::()).unwrap(); - has_valid_add_bid_args(&args).unwrap(); - - // Check with extra arg. - args.insert("a", 1).unwrap(); - has_valid_add_bid_args(&args).unwrap(); - } - - #[test] - fn add_bid_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Missing "public_key". - let args = runtime_args! { - ADD_BID_ARG_DELEGATION_RATE.name => rng.gen::(), - ADD_BID_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: ADD_BID_ARG_PUBLIC_KEY.name.to_string(), - }; - assert_eq!(has_valid_add_bid_args(&args), Err(expected_error)); - - // Missing "delegation_rate". 
- let args = runtime_args! { - ADD_BID_ARG_PUBLIC_KEY.name => PublicKey::random(rng), - ADD_BID_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: ADD_BID_ARG_DELEGATION_RATE.name.to_string(), - }; - assert_eq!(has_valid_add_bid_args(&args), Err(expected_error)); - - // Missing "amount". - let args = runtime_args! { - ADD_BID_ARG_PUBLIC_KEY.name => PublicKey::random(rng), - ADD_BID_ARG_DELEGATION_RATE.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: ADD_BID_ARG_AMOUNT.name.to_string(), - }; - assert_eq!(has_valid_add_bid_args(&args), Err(expected_error)); - } - - #[test] - fn add_bid_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Wrong "amount" type. - let args = runtime_args! { - ADD_BID_ARG_PUBLIC_KEY.name => PublicKey::random(rng), - ADD_BID_ARG_DELEGATION_RATE.name => rng.gen::(), - ADD_BID_ARG_AMOUNT.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: ADD_BID_ARG_AMOUNT.name.to_string(), - expected: CLType::U512, - got: CLType::U64, - }; - assert_eq!(has_valid_add_bid_args(&args), Err(expected_error)); - } - - #[test] - fn should_validate_withdraw_bid_args() { - let rng = &mut TestRng::new(); - - // Check random args. - let mut args = new_withdraw_bid_args(PublicKey::random(rng), rng.gen::()).unwrap(); - has_valid_withdraw_bid_args(&args).unwrap(); - - // Check with extra arg. - args.insert("a", 1).unwrap(); - has_valid_withdraw_bid_args(&args).unwrap(); - } - - #[test] - fn withdraw_bid_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Missing "public_key". - let args = runtime_args! { - WITHDRAW_BID_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: WITHDRAW_BID_ARG_PUBLIC_KEY.name.to_string(), - }; - assert_eq!(has_valid_withdraw_bid_args(&args), Err(expected_error)); - - // Missing "amount". - let args = runtime_args! { - WITHDRAW_BID_ARG_PUBLIC_KEY.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: WITHDRAW_BID_ARG_AMOUNT.name.to_string(), - }; - assert_eq!(has_valid_withdraw_bid_args(&args), Err(expected_error)); - } - - #[test] - fn withdraw_bid_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Wrong "amount" type. - let args = runtime_args! { - WITHDRAW_BID_ARG_PUBLIC_KEY.name => PublicKey::random(rng), - WITHDRAW_BID_ARG_AMOUNT.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: WITHDRAW_BID_ARG_AMOUNT.name.to_string(), - expected: CLType::U512, - got: CLType::U64, - }; - assert_eq!(has_valid_withdraw_bid_args(&args), Err(expected_error)); - } - - #[test] - fn should_validate_delegate_args() { - let rng = &mut TestRng::new(); - - // Check random args. - let mut args = new_delegate_args( - PublicKey::random(rng), - PublicKey::random(rng), - rng.gen::(), - ) - .unwrap(); - has_valid_delegate_args(&args).unwrap(); - - // Check with extra arg. - args.insert("a", 1).unwrap(); - has_valid_delegate_args(&args).unwrap(); - } - - #[test] - fn delegate_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Missing "delegator". - let args = runtime_args! 
{ - DELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - DELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: DELEGATE_ARG_DELEGATOR.name.to_string(), - }; - assert_eq!(has_valid_delegate_args(&args), Err(expected_error)); - - // Missing "validator". - let args = runtime_args! { - DELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - DELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: DELEGATE_ARG_VALIDATOR.name.to_string(), - }; - assert_eq!(has_valid_delegate_args(&args), Err(expected_error)); - - // Missing "amount". - let args = runtime_args! { - DELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - DELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: DELEGATE_ARG_AMOUNT.name.to_string(), - }; - assert_eq!(has_valid_delegate_args(&args), Err(expected_error)); - } - - #[test] - fn delegate_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Wrong "amount" type. - let args = runtime_args! { - DELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - DELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - DELEGATE_ARG_AMOUNT.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: DELEGATE_ARG_AMOUNT.name.to_string(), - expected: CLType::U512, - got: CLType::U64, - }; - assert_eq!(has_valid_delegate_args(&args), Err(expected_error)); - } - - #[test] - fn should_validate_undelegate_args() { - let rng = &mut TestRng::new(); - - // Check random args. - let mut args = new_undelegate_args( - PublicKey::random(rng), - PublicKey::random(rng), - rng.gen::(), - ) - .unwrap(); - has_valid_undelegate_args(&args).unwrap(); - - // Check with extra arg. - args.insert("a", 1).unwrap(); - has_valid_undelegate_args(&args).unwrap(); - } - - #[test] - fn undelegate_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Missing "delegator". - let args = runtime_args! { - UNDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - UNDELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: UNDELEGATE_ARG_DELEGATOR.name.to_string(), - }; - assert_eq!(has_valid_undelegate_args(&args), Err(expected_error)); - - // Missing "validator". - let args = runtime_args! { - UNDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - UNDELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()) - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: UNDELEGATE_ARG_VALIDATOR.name.to_string(), - }; - assert_eq!(has_valid_undelegate_args(&args), Err(expected_error)); - - // Missing "amount". - let args = runtime_args! { - UNDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - UNDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: UNDELEGATE_ARG_AMOUNT.name.to_string(), - }; - assert_eq!(has_valid_undelegate_args(&args), Err(expected_error)); - } - - #[test] - fn undelegate_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Wrong "amount" type. - let args = runtime_args! 
{ - UNDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - UNDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - UNDELEGATE_ARG_AMOUNT.name => rng.gen::() - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: UNDELEGATE_ARG_AMOUNT.name.to_string(), - expected: CLType::U512, - got: CLType::U64, - }; - assert_eq!(has_valid_undelegate_args(&args), Err(expected_error)); - } - - #[test] - fn should_validate_redelegate_args() { - let rng = &mut TestRng::new(); - - // Check random args. - let mut args = new_redelegate_args( - PublicKey::random(rng), - PublicKey::random(rng), - rng.gen::(), - PublicKey::random(rng), - ) - .unwrap(); - has_valid_redelegate_args(&args).unwrap(); - - // Check with extra arg. - args.insert("a", 1).unwrap(); - has_valid_redelegate_args(&args).unwrap(); - } - - #[test] - fn redelegate_args_with_missing_required_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Missing "delegator". - let args = runtime_args! { - REDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()), - REDELEGATE_ARG_NEW_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: REDELEGATE_ARG_DELEGATOR.name.to_string(), - }; - assert_eq!(has_valid_redelegate_args(&args), Err(expected_error)); - - // Missing "validator". - let args = runtime_args! { - REDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()), - REDELEGATE_ARG_NEW_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: REDELEGATE_ARG_VALIDATOR.name.to_string(), - }; - assert_eq!(has_valid_redelegate_args(&args), Err(expected_error)); - - // Missing "amount". - let args = runtime_args! { - REDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_NEW_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: REDELEGATE_ARG_AMOUNT.name.to_string(), - }; - assert_eq!(has_valid_redelegate_args(&args), Err(expected_error)); - - // Missing "new_validator". - let args = runtime_args! { - REDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_AMOUNT.name => U512::from(rng.gen::()), - }; - let expected_error = TransactionV1ConfigFailure::MissingArg { - arg_name: REDELEGATE_ARG_NEW_VALIDATOR.name.to_string(), - }; - assert_eq!(has_valid_redelegate_args(&args), Err(expected_error)); - } - - #[test] - fn redelegate_args_with_wrong_type_should_be_invalid() { - let rng = &mut TestRng::new(); - - // Wrong "amount" type. - let args = runtime_args! 
{ - REDELEGATE_ARG_DELEGATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_VALIDATOR.name => PublicKey::random(rng), - REDELEGATE_ARG_AMOUNT.name => rng.gen::(), - REDELEGATE_ARG_NEW_VALIDATOR.name => PublicKey::random(rng), - }; - let expected_error = TransactionV1ConfigFailure::UnexpectedArgType { - arg_name: REDELEGATE_ARG_AMOUNT.name.to_string(), - expected: CLType::U512, - got: CLType::U64, - }; - assert_eq!(has_valid_redelegate_args(&args), Err(expected_error)); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder.rs deleted file mode 100644 index f707cfe2..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder.rs +++ /dev/null @@ -1,490 +0,0 @@ -mod error; - -use core::marker::PhantomData; - -#[cfg(any(feature = "testing", test))] -use rand::Rng; - -use super::{ - super::{ - InitiatorAddr, TransactionEntryPoint, TransactionInvocationTarget, TransactionRuntime, - TransactionScheduling, TransactionSessionKind, TransactionTarget, - }, - transaction_v1_body::arg_handling, - InitiatorAddrAndSecretKey, PricingMode, TransactionV1, TransactionV1Body, -}; -use crate::{ - account::AccountHash, bytesrepr::Bytes, CLValue, CLValueError, EntityAddr, EntityVersion, - PackageAddr, PublicKey, RuntimeArgs, SecretKey, TimeDiff, Timestamp, URef, U512, -}; -#[cfg(any(feature = "testing", test))] -use crate::{testing::TestRng, TransactionConfig, TransactionV1Approval, TransactionV1Hash}; -pub use error::TransactionV1BuilderError; - -/// A builder for constructing a [`TransactionV1`]. -/// -/// # Note -/// -/// Before calling [`build`](Self::build), you must ensure that: -/// * an initiator_addr is provided by either calling -/// [`with_initiator_addr`](Self::with_initiator_addr) or -/// [`with_secret_key`](Self::with_secret_key) -/// * the chain name is set by calling [`with_chain_name`](Self::with_chain_name) -/// -/// If no secret key is provided, the resulting transaction will be unsigned, and hence invalid. -/// It can be signed later (multiple times if desired) to make it valid before sending to the -/// network for execution. -pub struct TransactionV1Builder<'a> { - chain_name: Option, - timestamp: Timestamp, - ttl: TimeDiff, - body: TransactionV1Body, - pricing_mode: PricingMode, - payment_amount: Option, - initiator_addr: Option, - #[cfg(not(any(feature = "testing", test)))] - secret_key: Option<&'a SecretKey>, - #[cfg(any(feature = "testing", test))] - secret_key: Option, - #[cfg(any(feature = "testing", test))] - invalid_approvals: Vec, - _phantom_data: PhantomData<&'a ()>, -} - -impl<'a> TransactionV1Builder<'a> { - /// The default time-to-live for transactions, i.e. 30 minutes. - pub const DEFAULT_TTL: TimeDiff = TimeDiff::from_millis(30 * 60 * 1_000); - /// The default pricing mode for transactions, i.e. multiplier of 1. - pub const DEFAULT_PRICING_MODE: PricingMode = PricingMode::GasPriceMultiplier(1); - /// The default runtime for transactions, i.e. Casper Version 1 Virtual Machine. - pub const DEFAULT_RUNTIME: TransactionRuntime = TransactionRuntime::VmCasperV1; - /// The default scheduling for transactions, i.e. `Standard`. 
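Editor's note: the `arg_handling` module deleted above pairs each native entry point with a `new_*_args` constructor and a `has_valid_*_args` checker, both driven by phantom-typed argument descriptors (assumed to be generic, e.g. `RequiredArg<T>`, to match the `PhantomData` fields shown). A minimal stand-alone sketch of that descriptor pattern, using a plain string map instead of `RuntimeArgs`/`CLValue`:

```rust
use std::{collections::HashMap, marker::PhantomData, str::FromStr};

struct RequiredArg<T> {
    name: &'static str,
    _phantom: PhantomData<T>,
}

impl<T> RequiredArg<T> {
    const fn new(name: &'static str) -> Self {
        Self { name, _phantom: PhantomData }
    }
}

impl<T: FromStr> RequiredArg<T> {
    // Mirrors `get`: a missing key and an unparseable value are reported as
    // distinct errors, like `MissingArg` / `UnexpectedArgType` in the original.
    fn get(&self, args: &HashMap<&str, String>) -> Result<T, String> {
        let raw = args
            .get(self.name)
            .ok_or_else(|| format!("missing required runtime argument '{}'", self.name))?;
        raw.parse()
            .map_err(|_| format!("unexpected type for runtime argument '{}'", self.name))
    }
}

const TRANSFER_ARG_AMOUNT: RequiredArg<u64> = RequiredArg::new("amount");

fn main() {
    let mut args = HashMap::new();
    args.insert("amount", "2500000000".to_string());
    assert_eq!(TRANSFER_ARG_AMOUNT.get(&args), Ok(2_500_000_000));
    assert!(TRANSFER_ARG_AMOUNT.get(&HashMap::new()).is_err());
}
```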
- pub const DEFAULT_SCHEDULING: TransactionScheduling = TransactionScheduling::Standard; - - fn new(body: TransactionV1Body) -> Self { - TransactionV1Builder { - chain_name: None, - timestamp: Timestamp::now(), - ttl: Self::DEFAULT_TTL, - body, - pricing_mode: Self::DEFAULT_PRICING_MODE, - payment_amount: None, - initiator_addr: None, - secret_key: None, - _phantom_data: PhantomData, - #[cfg(any(feature = "testing", test))] - invalid_approvals: vec![], - } - } - - /// Returns a new `TransactionV1Builder` suitable for building a native transfer transaction. - pub fn new_transfer>( - source: URef, - target: URef, - amount: A, - maybe_to: Option, - maybe_id: Option, - ) -> Result { - let args = arg_handling::new_transfer_args(source, target, amount, maybe_to, maybe_id)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Transfer, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - /// Returns a new `TransactionV1Builder` suitable for building a native add_bid transaction. - pub fn new_add_bid>( - public_key: PublicKey, - delegation_rate: u8, - amount: A, - ) -> Result { - let args = arg_handling::new_add_bid_args(public_key, delegation_rate, amount)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::AddBid, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - /// Returns a new `TransactionV1Builder` suitable for building a native withdraw_bid - /// transaction. - pub fn new_withdraw_bid>( - public_key: PublicKey, - amount: A, - ) -> Result { - let args = arg_handling::new_withdraw_bid_args(public_key, amount)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::WithdrawBid, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - /// Returns a new `TransactionV1Builder` suitable for building a native delegate transaction. - pub fn new_delegate>( - delegator: PublicKey, - validator: PublicKey, - amount: A, - ) -> Result { - let args = arg_handling::new_delegate_args(delegator, validator, amount)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Delegate, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - /// Returns a new `TransactionV1Builder` suitable for building a native undelegate transaction. - pub fn new_undelegate>( - delegator: PublicKey, - validator: PublicKey, - amount: A, - ) -> Result { - let args = arg_handling::new_undelegate_args(delegator, validator, amount)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Undelegate, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - /// Returns a new `TransactionV1Builder` suitable for building a native redelegate transaction. 
- pub fn new_redelegate>( - delegator: PublicKey, - validator: PublicKey, - amount: A, - new_validator: PublicKey, - ) -> Result { - let args = arg_handling::new_redelegate_args(delegator, validator, amount, new_validator)?; - let body = TransactionV1Body::new( - args, - TransactionTarget::Native, - TransactionEntryPoint::Redelegate, - Self::DEFAULT_SCHEDULING, - ); - Ok(TransactionV1Builder::new(body)) - } - - fn new_targeting_stored>( - id: TransactionInvocationTarget, - entry_point: E, - ) -> Self { - let target = TransactionTarget::Stored { - id, - runtime: Self::DEFAULT_RUNTIME, - }; - let body = TransactionV1Body::new( - RuntimeArgs::new(), - target, - TransactionEntryPoint::Custom(entry_point.into()), - Self::DEFAULT_SCHEDULING, - ); - TransactionV1Builder::new(body) - } - - /// Returns a new `TransactionV1Builder` suitable for building a transaction targeting a stored - /// entity. - pub fn new_targeting_invocable_entity>( - addr: EntityAddr, - entry_point: E, - ) -> Self { - let id = TransactionInvocationTarget::new_invocable_entity(addr); - Self::new_targeting_stored(id, entry_point) - } - - /// Returns a new `TransactionV1Builder` suitable for building a transaction targeting a stored - /// entity via its alias. - pub fn new_targeting_invocable_entity_via_alias, E: Into>( - alias: A, - entry_point: E, - ) -> Self { - let id = TransactionInvocationTarget::new_invocable_entity_alias(alias.into()); - Self::new_targeting_stored(id, entry_point) - } - - /// Returns a new `TransactionV1Builder` suitable for building a transaction targeting a - /// package. - pub fn new_targeting_package>( - addr: PackageAddr, - version: Option, - entry_point: E, - ) -> Self { - let id = TransactionInvocationTarget::new_package(addr, version); - Self::new_targeting_stored(id, entry_point) - } - - /// Returns a new `TransactionV1Builder` suitable for building a transaction targeting a - /// package via its alias. - pub fn new_targeting_package_via_alias, E: Into>( - alias: A, - version: Option, - entry_point: E, - ) -> Self { - let id = TransactionInvocationTarget::new_package_alias(alias.into(), version); - Self::new_targeting_stored(id, entry_point) - } - - /// Returns a new `TransactionV1Builder` suitable for building a transaction for running session - /// logic, i.e. compiled Wasm. - pub fn new_session>( - kind: TransactionSessionKind, - module_bytes: Bytes, - entry_point: E, - ) -> Self { - let target = TransactionTarget::Session { - kind, - module_bytes, - runtime: Self::DEFAULT_RUNTIME, - }; - let body = TransactionV1Body::new( - RuntimeArgs::new(), - target, - TransactionEntryPoint::Custom(entry_point.into()), - Self::DEFAULT_SCHEDULING, - ); - TransactionV1Builder::new(body) - } - - /// Returns a new `TransactionV1Builder` which will build a random, valid but possibly expired - /// transaction. 
- /// - /// The transaction can be made invalid in the following ways: - /// * unsigned by calling `with_no_secret_key` - /// * given an invalid approval by calling `with_invalid_approval` - #[cfg(any(feature = "testing", test))] - pub fn new_random(rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random(rng); - let ttl_millis = rng.gen_range(60_000..TransactionConfig::default().max_ttl.millis()); - let body = TransactionV1Body::random(rng); - TransactionV1Builder { - chain_name: Some(rng.random_string(5..10)), - timestamp: Timestamp::random(rng), - ttl: TimeDiff::from_millis(ttl_millis), - body, - pricing_mode: PricingMode::random(rng), - payment_amount: Some( - rng.gen_range(2_500_000_000..=TransactionConfig::default().block_gas_limit), - ), - initiator_addr: Some(InitiatorAddr::PublicKey(PublicKey::from(&secret_key))), - secret_key: Some(secret_key), - _phantom_data: PhantomData, - invalid_approvals: vec![], - } - } - - /// Sets the `chain_name` in the transaction. - /// - /// Must be provided or building will fail. - pub fn with_chain_name>(mut self, chain_name: C) -> Self { - self.chain_name = Some(chain_name.into()); - self - } - - /// Sets the `timestamp` in the transaction. - /// - /// If not provided, the timestamp will be set to the time when the builder was constructed. - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.timestamp = timestamp; - self - } - - /// Sets the `ttl` (time-to-live) in the transaction. - /// - /// If not provided, the ttl will be set to [`Self::DEFAULT_TTL`]. - pub fn with_ttl(mut self, ttl: TimeDiff) -> Self { - self.ttl = ttl; - self - } - - /// Sets the `pricing_mode` in the transaction. - /// - /// If not provided, the pricing mode will be set to [`Self::DEFAULT_PRICING_MODE`]. - pub fn with_pricing_mode(mut self, pricing_mode: PricingMode) -> Self { - self.pricing_mode = pricing_mode; - self - } - - /// Sets the `payment_amount` in the transaction. - /// - /// If not provided, `payment_amount` will be set to `None`. - pub fn with_payment_amount(mut self, payment_amount: u64) -> Self { - self.payment_amount = Some(payment_amount); - self - } - - /// Sets the `initiator_addr` in the transaction. - /// - /// If not provided, the public key derived from the secret key used in the builder will be - /// used as the `InitiatorAddr::PublicKey` in the transaction. - pub fn with_initiator_addr(mut self, initiator_addr: InitiatorAddr) -> Self { - self.initiator_addr = Some(initiator_addr); - self - } - - /// Sets the secret key used to sign the transaction on calling [`build`](Self::build). - /// - /// If not provided, the transaction can still be built, but will be unsigned and will be - /// invalid until subsequently signed. - pub fn with_secret_key(mut self, secret_key: &'a SecretKey) -> Self { - #[cfg(not(any(feature = "testing", test)))] - { - self.secret_key = Some(secret_key); - } - #[cfg(any(feature = "testing", test))] - { - self.secret_key = Some( - SecretKey::from_der(secret_key.to_der().expect("should der-encode")) - .expect("should der-decode"), - ); - } - self - } - - /// Appends the given runtime arg into the body's `args`. - pub fn with_runtime_arg>(mut self, key: K, cl_value: CLValue) -> Self { - self.body.args.insert_cl_value(key, cl_value); - self - } - - /// Sets the runtime args in the transaction. - /// - /// NOTE: this overwrites any existing runtime args. To append to existing args, use - /// [`TransactionV1Builder::with_runtime_arg`]. 
- pub fn with_runtime_args(mut self, args: RuntimeArgs) -> Self { - self.body.args = args; - self - } - - /// Sets the runtime for the transaction. - /// - /// If not provided, the runtime will be set to [`Self::DEFAULT_RUNTIME`]. - /// - /// NOTE: This has no effect for native transactions, i.e. where the `body.target` is - /// `TransactionTarget::Native`. - pub fn with_runtime(mut self, runtime: TransactionRuntime) -> Self { - match &mut self.body.target { - TransactionTarget::Native => {} - TransactionTarget::Stored { - runtime: existing_runtime, - .. - } => { - *existing_runtime = runtime; - } - TransactionTarget::Session { - runtime: existing_runtime, - .. - } => { - *existing_runtime = runtime; - } - } - self - } - - /// Sets the scheduling for the transaction. - /// - /// If not provided, the scheduling will be set to [`Self::DEFAULT_SCHEDULING`]. - pub fn with_scheduling(mut self, scheduling: TransactionScheduling) -> Self { - self.body.scheduling = scheduling; - self - } - - /// Sets the secret key to `None`, meaning the transaction can still be built but will be - /// unsigned and will be invalid until subsequently signed. - #[cfg(any(feature = "testing", test))] - pub fn with_no_secret_key(mut self) -> Self { - self.secret_key = None; - self - } - - /// Sets an invalid approval in the transaction. - #[cfg(any(feature = "testing", test))] - pub fn with_invalid_approval(mut self, rng: &mut TestRng) -> Self { - let secret_key = SecretKey::random(rng); - let hash = TransactionV1Hash::random(rng); - let approval = TransactionV1Approval::create(&hash, &secret_key); - self.invalid_approvals.push(approval); - self - } - - /// Returns the new transaction, or an error if non-defaulted fields were not set. - /// - /// For more info, see [the `TransactionBuilder` documentation](TransactionV1Builder). 
- pub fn build(self) -> Result { - self.do_build() - } - - #[cfg(not(any(feature = "testing", test)))] - fn do_build(self) -> Result { - let initiator_addr_and_secret_key = match (self.initiator_addr, self.secret_key) { - (Some(initiator_addr), Some(secret_key)) => InitiatorAddrAndSecretKey::Both { - initiator_addr, - secret_key, - }, - (Some(initiator_addr), None) => { - InitiatorAddrAndSecretKey::InitiatorAddr(initiator_addr) - } - (None, Some(secret_key)) => InitiatorAddrAndSecretKey::SecretKey(secret_key), - (None, None) => return Err(TransactionV1BuilderError::MissingInitiatorAddr), - }; - - let chain_name = self - .chain_name - .ok_or(TransactionV1BuilderError::MissingChainName)?; - - let transaction = TransactionV1::build( - chain_name, - self.timestamp, - self.ttl, - self.body, - self.pricing_mode, - self.payment_amount, - initiator_addr_and_secret_key, - ); - - Ok(transaction) - } - - #[cfg(any(feature = "testing", test))] - fn do_build(self) -> Result { - let initiator_addr_and_secret_key = match (self.initiator_addr, &self.secret_key) { - (Some(initiator_addr), Some(secret_key)) => InitiatorAddrAndSecretKey::Both { - initiator_addr, - secret_key, - }, - (Some(initiator_addr), None) => { - InitiatorAddrAndSecretKey::InitiatorAddr(initiator_addr) - } - (None, Some(secret_key)) => InitiatorAddrAndSecretKey::SecretKey(secret_key), - (None, None) => return Err(TransactionV1BuilderError::MissingInitiatorAddr), - }; - - let chain_name = self - .chain_name - .ok_or(TransactionV1BuilderError::MissingChainName)?; - - let mut transaction = TransactionV1::build( - chain_name, - self.timestamp, - self.ttl, - self.body, - self.pricing_mode, - self.payment_amount, - initiator_addr_and_secret_key, - ); - - transaction.apply_approvals(self.invalid_approvals); - - Ok(transaction) - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder/error.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder/error.rs deleted file mode 100644 index f9212100..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_builder/error.rs +++ /dev/null @@ -1,44 +0,0 @@ -use core::fmt::{self, Display, Formatter}; -#[cfg(feature = "std")] -use std::error::Error as StdError; - -#[cfg(doc)] -use super::{TransactionV1, TransactionV1Builder}; - -/// Errors returned while building a [`TransactionV1`] using a [`TransactionV1Builder`]. -#[derive(Clone, Eq, PartialEq, Debug)] -#[non_exhaustive] -pub enum TransactionV1BuilderError { - /// Failed to build transaction due to missing initiator_addr. - /// - /// Call [`TransactionV1Builder::with_initiator_addr`] or - /// [`TransactionV1Builder::with_secret_key`] before calling [`TransactionV1Builder::build`]. - MissingInitiatorAddr, - /// Failed to build transaction due to missing chain name. - /// - /// Call [`TransactionV1Builder::with_chain_name`] before calling - /// [`TransactionV1Builder::build`]. 
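Editor's note: for context, a sketch of how the removed `TransactionV1Builder` is meant to be driven end to end. The import paths and error handling below are assumptions based on the deleted source, not a published API:

```rust
// Hypothetical driver for the builder deleted above; paths and types are taken
// from the removed file and may not match any released casper-types crate.
use casper_types_ver_2_0::{SecretKey, TimeDiff, TransactionV1, TransactionV1Builder, URef, U512};

fn build_transfer(secret_key: &SecretKey, source: URef, target: URef) -> TransactionV1 {
    // `new_transfer` builds and validates the runtime args via `arg_handling`.
    TransactionV1Builder::new_transfer(source, target, U512::from(2_500_000_000_u64), None, None)
        .expect("transfer args should encode as CLValues")
        .with_chain_name("casper-test")          // mandatory, else build() fails with MissingChainName
        .with_ttl(TimeDiff::from_seconds(1_800)) // same value as DEFAULT_TTL (30 minutes)
        .with_secret_key(secret_key)             // also supplies the initiator address
        .build()
        .expect("chain name and initiator are set")
}
```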
- MissingChainName, -} - -impl Display for TransactionV1BuilderError { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - match self { - TransactionV1BuilderError::MissingInitiatorAddr => { - write!( - formatter, - "transaction requires account - use `with_account` or `with_secret_key`" - ) - } - TransactionV1BuilderError::MissingChainName => { - write!( - formatter, - "transaction requires chain name - use `with_chain_name`" - ) - } - } - } -} - -#[cfg(feature = "std")] -impl StdError for TransactionV1BuilderError {} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_hash.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_hash.rs deleted file mode 100644 index c7ba947d..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_hash.rs +++ /dev/null @@ -1,117 +0,0 @@ -use alloc::vec::Vec; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(any(feature = "testing", test))] -use rand::Rng; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[cfg(doc)] -use super::TransactionV1; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, -}; - -/// The cryptographic hash of a [`TransactionV1`]. -#[derive( - Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, Default, -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "Hex-encoded TransactionV1 hash.") -)] -#[serde(deny_unknown_fields)] -pub struct TransactionV1Hash(Digest); - -impl TransactionV1Hash { - /// The number of bytes in a `TransactionV1Hash` digest. - pub const LENGTH: usize = Digest::LENGTH; - - /// Constructs a new `TransactionV1Hash`. - pub const fn new(hash: Digest) -> Self { - TransactionV1Hash(hash) - } - - /// Returns the wrapped inner digest. - pub fn inner(&self) -> &Digest { - &self.0 - } - - /// Returns a new `TransactionV1Hash` directly initialized with the provided bytes; no hashing - /// is done. - #[cfg(any(feature = "testing", test))] - pub const fn from_raw(raw_digest: [u8; Self::LENGTH]) -> Self { - TransactionV1Hash(Digest::from_raw(raw_digest)) - } - - /// Returns a random `TransactionV1Hash`. 
- #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - let hash = rng.gen::<[u8; Digest::LENGTH]>().into(); - TransactionV1Hash(hash) - } -} - -impl From for TransactionV1Hash { - fn from(digest: Digest) -> Self { - TransactionV1Hash(digest) - } -} - -impl From for Digest { - fn from(transaction_hash: TransactionV1Hash) -> Self { - transaction_hash.0 - } -} - -impl Display for TransactionV1Hash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "transaction-v1-hash({})", self.0) - } -} - -impl AsRef<[u8]> for TransactionV1Hash { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl ToBytes for TransactionV1Hash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -impl FromBytes for TransactionV1Hash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - Digest::from_bytes(bytes).map(|(inner, remainder)| (TransactionV1Hash(inner), remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - let hash = TransactionV1Hash::random(rng); - bytesrepr::test_serialization_roundtrip(&hash); - } -} diff --git a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_header.rs b/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_header.rs deleted file mode 100644 index 65926bee..00000000 --- a/casper_types_ver_2_0/src/transaction/transaction_v1/transaction_v1_header.rs +++ /dev/null @@ -1,244 +0,0 @@ -use alloc::{ - string::{String, ToString}, - vec::Vec, -}; -use core::fmt::{self, Display, Formatter}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -#[cfg(any(feature = "std", test))] -use serde::{Deserialize, Serialize}; -#[cfg(any(feature = "std", test))] -use tracing::debug; - -#[cfg(doc)] -use super::TransactionV1; -use super::{InitiatorAddr, PricingMode}; -use crate::{ - bytesrepr::{self, FromBytes, ToBytes}, - Digest, TimeDiff, Timestamp, -}; -#[cfg(any(feature = "std", test))] -use crate::{TransactionConfig, TransactionV1ConfigFailure, TransactionV1Hash}; - -/// The header portion of a [`TransactionV1`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] -#[cfg_attr( - any(feature = "std", test), - derive(Serialize, Deserialize), - serde(deny_unknown_fields) -)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr( - feature = "json-schema", - derive(JsonSchema), - schemars(description = "The header portion of a TransactionV1.") -)] -pub struct TransactionV1Header { - chain_name: String, - timestamp: Timestamp, - ttl: TimeDiff, - body_hash: Digest, - pricing_mode: PricingMode, - payment_amount: Option, - initiator_addr: InitiatorAddr, -} - -impl TransactionV1Header { - #[cfg(any(feature = "std", feature = "json-schema", test))] - pub(super) fn new( - chain_name: String, - timestamp: Timestamp, - ttl: TimeDiff, - body_hash: Digest, - pricing_mode: PricingMode, - payment_amount: Option, - initiator_addr: InitiatorAddr, - ) -> Self { - TransactionV1Header { - chain_name, - timestamp, - ttl, - body_hash, - pricing_mode, - payment_amount, - initiator_addr, - } - } - - /// Computes the hash identifying this transaction. 
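Editor's note: the hash type deleted above is a thin newtype over `Digest` whose only behaviour beyond serialization is its prefixed display form. A dependency-free sketch of that pattern, with a raw 32-byte array standing in for `Digest`:

```rust
use std::fmt::{self, Display, Formatter};

const LENGTH: usize = 32;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct TransactionV1Hash([u8; LENGTH]);

impl Display for TransactionV1Hash {
    // Mirrors the original formatting: "transaction-v1-hash(<hex digest>)".
    fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
        write!(formatter, "transaction-v1-hash(")?;
        for byte in self.0 {
            write!(formatter, "{:02x}", byte)?;
        }
        write!(formatter, ")")
    }
}

fn main() {
    let hash = TransactionV1Hash([0xab; LENGTH]);
    assert!(hash.to_string().starts_with("transaction-v1-hash(abab"));
}
```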
- #[cfg(any(feature = "std", test))] - pub fn compute_hash(&self) -> TransactionV1Hash { - TransactionV1Hash::new(Digest::hash( - self.to_bytes() - .unwrap_or_else(|error| panic!("should serialize header: {}", error)), - )) - } - - /// Returns the name of the chain the transaction should be executed on. - pub fn chain_name(&self) -> &str { - &self.chain_name - } - - /// Returns the creation timestamp of the transaction. - pub fn timestamp(&self) -> Timestamp { - self.timestamp - } - - /// Returns the duration after the creation timestamp for which the transaction will stay valid. - /// - /// After this duration has ended, the transaction will be considered expired. - pub fn ttl(&self) -> TimeDiff { - self.ttl - } - - /// Returns `true` if the transaction has expired. - pub fn expired(&self, current_instant: Timestamp) -> bool { - self.expires() < current_instant - } - - /// Returns the hash of the body of the transaction. - pub fn body_hash(&self) -> &Digest { - &self.body_hash - } - - /// Returns the pricing mode for the transaction. - pub fn pricing_mode(&self) -> &PricingMode { - &self.pricing_mode - } - - /// Returns the payment amount for the transaction. - pub fn payment_amount(&self) -> Option { - self.payment_amount - } - - /// Returns the address of the initiator of the transaction. - pub fn initiator_addr(&self) -> &InitiatorAddr { - &self.initiator_addr - } - - /// Returns `Ok` if and only if the TTL is within limits, and the timestamp is not later than - /// `at + timestamp_leeway`. Does NOT check for expiry. - #[cfg(any(feature = "std", test))] - pub fn is_valid( - &self, - config: &TransactionConfig, - timestamp_leeway: TimeDiff, - at: Timestamp, - transaction_hash: &TransactionV1Hash, - ) -> Result<(), TransactionV1ConfigFailure> { - if self.ttl() > config.max_ttl { - debug!( - %transaction_hash, - transaction_header = %self, - max_ttl = %config.max_ttl, - "transaction ttl excessive" - ); - return Err(TransactionV1ConfigFailure::ExcessiveTimeToLive { - max_ttl: config.max_ttl, - got: self.ttl(), - }); - } - - if self.timestamp() > at + timestamp_leeway { - debug!( - %transaction_hash, transaction_header = %self, %at, - "transaction timestamp in the future" - ); - return Err(TransactionV1ConfigFailure::TimestampInFuture { - validation_timestamp: at, - timestamp_leeway, - got: self.timestamp(), - }); - } - - Ok(()) - } - - /// Returns the timestamp of when the transaction expires, i.e. `self.timestamp + self.ttl`. 
- pub fn expires(&self) -> Timestamp { - self.timestamp.saturating_add(self.ttl) - } - - #[cfg(any(all(feature = "std", feature = "testing"), test))] - pub(super) fn invalidate(&mut self) { - self.chain_name.clear(); - } -} - -impl ToBytes for TransactionV1Header { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.chain_name.write_bytes(writer)?; - self.timestamp.write_bytes(writer)?; - self.ttl.write_bytes(writer)?; - self.body_hash.write_bytes(writer)?; - self.pricing_mode.write_bytes(writer)?; - self.payment_amount.write_bytes(writer)?; - self.initiator_addr.write_bytes(writer) - } - - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.chain_name.serialized_length() - + self.timestamp.serialized_length() - + self.ttl.serialized_length() - + self.body_hash.serialized_length() - + self.pricing_mode.serialized_length() - + self.payment_amount.serialized_length() - + self.initiator_addr.serialized_length() - } -} - -impl FromBytes for TransactionV1Header { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (chain_name, remainder) = String::from_bytes(bytes)?; - let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; - let (ttl, remainder) = TimeDiff::from_bytes(remainder)?; - let (body_hash, remainder) = Digest::from_bytes(remainder)?; - let (pricing_mode, remainder) = PricingMode::from_bytes(remainder)?; - let (payment_amount, remainder) = Option::::from_bytes(remainder)?; - let (initiator_addr, remainder) = InitiatorAddr::from_bytes(remainder)?; - let transaction_header = TransactionV1Header { - chain_name, - timestamp, - ttl, - body_hash, - pricing_mode, - payment_amount, - initiator_addr, - }; - Ok((transaction_header, remainder)) - } -} - -impl Display for TransactionV1Header { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - #[cfg(any(feature = "std", test))] - let hash = self.compute_hash(); - #[cfg(not(any(feature = "std", test)))] - let hash = "unknown"; - write!( - formatter, - "transaction-v1-header[{}, chain_name: {}, timestamp: {}, ttl: {}, pricing mode: {}, \ - payment_amount: {}, initiator: {}]", - hash, - self.chain_name, - self.timestamp, - self.ttl, - self.pricing_mode, - if let Some(payment) = self.payment_amount { - payment.to_string() - } else { - "none".to_string() - }, - self.initiator_addr - ) - } -} diff --git a/casper_types_ver_2_0/src/transfer.rs b/casper_types_ver_2_0/src/transfer.rs deleted file mode 100644 index 38dfe8f0..00000000 --- a/casper_types_ver_2_0/src/transfer.rs +++ /dev/null @@ -1,414 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - account::AccountHash, - bytesrepr::{self, FromBytes, ToBytes}, - checksummed_hex, serde_helpers, CLType, CLTyped, DeployHash, URef, U512, -}; - -/// The length of a transfer address. 
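Editor's note: the header's expiry and validity rules above reduce to simple time arithmetic: `expires = timestamp + ttl` (saturating), `expired(now)` is `expires < now`, and `is_valid` only bounds the TTL and rejects far-future timestamps, deliberately not checking expiry. A stand-alone sketch using plain millisecond counts instead of `Timestamp`/`TimeDiff` (the names below are illustrative):

```rust
struct HeaderTimes {
    timestamp_ms: u64,
    ttl_ms: u64,
}

impl HeaderTimes {
    // Mirrors `expires()`: timestamp + ttl, saturating on overflow.
    fn expires_ms(&self) -> u64 {
        self.timestamp_ms.saturating_add(self.ttl_ms)
    }

    // Mirrors `expired()`: only strictly past the expiry instant counts as expired.
    fn expired(&self, now_ms: u64) -> bool {
        self.expires_ms() < now_ms
    }

    // Mirrors the two checks in `is_valid()`: TTL within the configured limit,
    // and timestamp no further in the future than the allowed leeway.
    fn is_valid(&self, max_ttl_ms: u64, leeway_ms: u64, at_ms: u64) -> Result<(), &'static str> {
        if self.ttl_ms > max_ttl_ms {
            return Err("transaction ttl excessive");
        }
        if self.timestamp_ms > at_ms + leeway_ms {
            return Err("transaction timestamp in the future");
        }
        Ok(())
    }
}

fn main() {
    let header = HeaderTimes { timestamp_ms: 1_000, ttl_ms: 30 * 60 * 1_000 };
    assert!(!header.expired(1_000 + 30 * 60 * 1_000)); // exactly at expiry: not yet expired
    assert!(header.expired(1_000 + 30 * 60 * 1_000 + 1));
    assert!(header.is_valid(2 * 60 * 60 * 1_000, 5_000, 2_000).is_ok());
}
```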
-pub const TRANSFER_ADDR_LENGTH: usize = 32; -pub(super) const TRANSFER_ADDR_FORMATTED_STRING_PREFIX: &str = "transfer-"; - -/// Represents a transfer from one purse to another -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[serde(deny_unknown_fields)] -pub struct Transfer { - /// Deploy that created the transfer - #[serde(with = "serde_helpers::deploy_hash_as_array")] - #[cfg_attr( - feature = "json-schema", - schemars( - with = "DeployHash", - description = "Hex-encoded Deploy hash of Deploy that created the transfer." - ) - )] - pub deploy_hash: DeployHash, - /// Account from which transfer was executed - pub from: AccountHash, - /// Account to which funds are transferred - pub to: Option, - /// Source purse - pub source: URef, - /// Target purse - pub target: URef, - /// Transfer amount - pub amount: U512, - /// Gas - pub gas: U512, - /// User-defined id - pub id: Option, -} - -impl Transfer { - /// Creates a [`Transfer`]. - #[allow(clippy::too_many_arguments)] - pub fn new( - deploy_hash: DeployHash, - from: AccountHash, - to: Option, - source: URef, - target: URef, - amount: U512, - gas: U512, - id: Option, - ) -> Self { - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - } - } -} - -impl FromBytes for Transfer { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (deploy_hash, rem) = FromBytes::from_bytes(bytes)?; - let (from, rem) = AccountHash::from_bytes(rem)?; - let (to, rem) = >::from_bytes(rem)?; - let (source, rem) = URef::from_bytes(rem)?; - let (target, rem) = URef::from_bytes(rem)?; - let (amount, rem) = U512::from_bytes(rem)?; - let (gas, rem) = U512::from_bytes(rem)?; - let (id, rem) = >::from_bytes(rem)?; - Ok(( - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - }, - rem, - )) - } -} - -impl ToBytes for Transfer { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut result = bytesrepr::allocate_buffer(self)?; - self.deploy_hash.write_bytes(&mut result)?; - self.from.write_bytes(&mut result)?; - self.to.write_bytes(&mut result)?; - self.source.write_bytes(&mut result)?; - self.target.write_bytes(&mut result)?; - self.amount.write_bytes(&mut result)?; - self.gas.write_bytes(&mut result)?; - self.id.write_bytes(&mut result)?; - Ok(result) - } - - fn serialized_length(&self) -> usize { - self.deploy_hash.serialized_length() - + self.from.serialized_length() - + self.to.serialized_length() - + self.source.serialized_length() - + self.target.serialized_length() - + self.amount.serialized_length() - + self.gas.serialized_length() - + self.id.serialized_length() - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.deploy_hash.write_bytes(writer)?; - self.from.write_bytes(writer)?; - self.to.write_bytes(writer)?; - self.source.write_bytes(writer)?; - self.target.write_bytes(writer)?; - self.amount.write_bytes(writer)?; - self.gas.write_bytes(writer)?; - self.id.write_bytes(writer)?; - Ok(()) - } -} - -/// Error returned when decoding a `TransferAddr` from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// The prefix is invalid. - InvalidPrefix, - /// The address is not valid hex. - Hex(base16::DecodeError), - /// The slice is the wrong length. 
- Length(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Length(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'transfer-'"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Length(error) => write!(f, "address portion is wrong length: {}", error), - } - } -} - -/// A newtype wrapping a [u8; [TRANSFER_ADDR_LENGTH]] which is the raw bytes of the -/// transfer address. -#[derive(Default, PartialOrd, Ord, PartialEq, Eq, Hash, Clone, Copy)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct TransferAddr([u8; TRANSFER_ADDR_LENGTH]); - -impl TransferAddr { - /// Constructs a new `TransferAddr` instance from the raw bytes. - pub const fn new(value: [u8; TRANSFER_ADDR_LENGTH]) -> TransferAddr { - TransferAddr(value) - } - - /// Returns the raw bytes of the transfer address as an array. - pub fn value(&self) -> [u8; TRANSFER_ADDR_LENGTH] { - self.0 - } - - /// Returns the raw bytes of the transfer address as a `slice`. - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } - - /// Formats the `TransferAddr` as a prefixed, hex-encoded string. - pub fn to_formatted_string(self) -> String { - format!( - "{}{}", - TRANSFER_ADDR_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.0), - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `TransferAddr`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(TRANSFER_ADDR_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let bytes = - <[u8; TRANSFER_ADDR_LENGTH]>::try_from(checksummed_hex::decode(remainder)?.as_ref())?; - Ok(TransferAddr(bytes)) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for TransferAddr { - fn schema_name() -> String { - String::from("TransferAddr") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some("Hex-encoded transfer address.".to_string()); - schema_object.into() - } -} - -impl Serialize for TransferAddr { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - self.0.serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for TransferAddr { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - TransferAddr::from_formatted_str(&formatted_string).map_err(SerdeError::custom) - } else { - let bytes = <[u8; TRANSFER_ADDR_LENGTH]>::deserialize(deserializer)?; - Ok(TransferAddr(bytes)) - } - } -} - -impl Display for TransferAddr { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", base16::encode_lower(&self.0)) - } -} - -impl Debug for TransferAddr { - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "TransferAddr({})", base16::encode_lower(&self.0)) - } -} - -impl CLTyped for TransferAddr { - fn cl_type() -> CLType { - CLType::ByteArray(TRANSFER_ADDR_LENGTH as u32) - } -} - -impl ToBytes for TransferAddr { - #[inline(always)] - fn to_bytes(&self) -> Result, 
bytesrepr::Error> { - self.0.to_bytes() - } - - #[inline(always)] - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } - - #[inline(always)] - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - self.0.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for TransferAddr { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (bytes, remainder) = FromBytes::from_bytes(bytes)?; - Ok((TransferAddr::new(bytes), remainder)) - } -} - -impl AsRef<[u8]> for TransferAddr { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> TransferAddr { - TransferAddr::new(rng.gen()) - } -} - -/// Generators for [`Transfer`] -#[cfg(any(feature = "testing", feature = "gens", test))] -pub mod gens { - use proptest::prelude::{prop::option, Arbitrary, Strategy}; - - use crate::{ - deploy_info::gens::{account_hash_arb, deploy_hash_arb}, - gens::{u512_arb, uref_arb}, - Transfer, - }; - - /// Creates an arbitrary [`Transfer`] - pub fn transfer_arb() -> impl Strategy { - ( - deploy_hash_arb(), - account_hash_arb(), - option::of(account_hash_arb()), - uref_arb(), - uref_arb(), - u512_arb(), - u512_arb(), - option::of(::arbitrary()), - ) - .prop_map(|(deploy_hash, from, to, source, target, amount, gas, id)| { - Transfer { - deploy_hash, - from, - to, - source, - target, - amount, - gas, - id, - } - }) - } -} - -#[cfg(test)] -mod tests { - use proptest::prelude::*; - - use crate::bytesrepr; - - use super::*; - - proptest! { - #[test] - fn test_serialization_roundtrip(transfer in gens::transfer_arb()) { - bytesrepr::test_serialization_roundtrip(&transfer) - } - } - - #[test] - fn transfer_addr_from_str() { - let transfer_address = TransferAddr([4; 32]); - let encoded = transfer_address.to_formatted_string(); - let decoded = TransferAddr::from_formatted_str(&encoded).unwrap(); - assert_eq!(transfer_address, decoded); - - let invalid_prefix = - "transfe-0000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "transfer0000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "transfer-00000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(short_addr).is_err()); - - let long_addr = - "transfer-000000000000000000000000000000000000000000000000000000000000000000"; - assert!(TransferAddr::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "transfer-000000000000000000000000000000000000000000000000000000000000000g"; - assert!(TransferAddr::from_formatted_str(invalid_hex).is_err()); - } - - #[test] - fn transfer_addr_serde_roundtrip() { - let transfer_address = TransferAddr([255; 32]); - let serialized = bincode::serialize(&transfer_address).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(transfer_address, decoded); - } - - #[test] - fn transfer_addr_json_roundtrip() { - let transfer_address = TransferAddr([255; 32]); - let json_string = serde_json::to_string_pretty(&transfer_address).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(transfer_address, decoded); - } -} diff --git a/casper_types_ver_2_0/src/transfer_result.rs b/casper_types_ver_2_0/src/transfer_result.rs deleted file mode 100644 index ba9ce66b..00000000 --- 
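Editor's note: the deleted `TransferAddr` uses a `transfer-`-prefixed, lower-case hex string format over its 32 raw bytes. A dependency-free sketch of the round trip (the real type decodes via `checksummed_hex`, so it also tolerates mixed-case input and reports slice-length errors differently):

```rust
const TRANSFER_ADDR_LENGTH: usize = 32;
const PREFIX: &str = "transfer-";

fn to_formatted_string(addr: &[u8; TRANSFER_ADDR_LENGTH]) -> String {
    let hex: String = addr.iter().map(|byte| format!("{:02x}", byte)).collect();
    format!("{}{}", PREFIX, hex)
}

fn from_formatted_str(input: &str) -> Result<[u8; TRANSFER_ADDR_LENGTH], String> {
    let hex = input.strip_prefix(PREFIX).ok_or("prefix is not 'transfer-'")?;
    if hex.len() != TRANSFER_ADDR_LENGTH * 2 {
        return Err("address portion is wrong length".to_string());
    }
    let mut bytes = [0u8; TRANSFER_ADDR_LENGTH];
    for (index, chunk) in hex.as_bytes().chunks(2).enumerate() {
        let pair = std::str::from_utf8(chunk).map_err(|error| error.to_string())?;
        bytes[index] = u8::from_str_radix(pair, 16).map_err(|error| error.to_string())?;
    }
    Ok(bytes)
}

fn main() {
    let addr = [4u8; TRANSFER_ADDR_LENGTH];
    let formatted = to_formatted_string(&addr);
    assert!(formatted.starts_with("transfer-04040404"));
    assert_eq!(from_formatted_str(&formatted).unwrap(), addr);
}
```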
a/casper_types_ver_2_0/src/transfer_result.rs +++ /dev/null @@ -1,39 +0,0 @@ -use core::fmt::Debug; - -use crate::ApiError; - -/// The result of an attempt to transfer between purses. -pub type TransferResult = Result; - -/// The result of a successful transfer between purses. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[repr(i32)] -pub enum TransferredTo { - /// The destination account already existed. - ExistingAccount = 0, - /// The destination account was created. - NewAccount = 1, -} - -impl TransferredTo { - /// Converts an `i32` to a [`TransferResult`], where: - /// * `0` represents `Ok(TransferredTo::ExistingAccount)`, - /// * `1` represents `Ok(TransferredTo::NewAccount)`, - /// * all other inputs are mapped to `Err(ApiError::Transfer)`. - pub fn result_from(value: i32) -> TransferResult { - match value { - x if x == TransferredTo::ExistingAccount as i32 => Ok(TransferredTo::ExistingAccount), - x if x == TransferredTo::NewAccount as i32 => Ok(TransferredTo::NewAccount), - _ => Err(ApiError::Transfer), - } - } - - // This conversion is not intended to be used by third party crates. - #[doc(hidden)] - pub fn i32_from(result: TransferResult) -> i32 { - match result { - Ok(transferred_to) => transferred_to as i32, - Err(_) => 2, - } - } -} diff --git a/casper_types_ver_2_0/src/uint.rs b/casper_types_ver_2_0/src/uint.rs deleted file mode 100644 index bdb30a45..00000000 --- a/casper_types_ver_2_0/src/uint.rs +++ /dev/null @@ -1,1001 +0,0 @@ -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{ - fmt::{self, Formatter}, - iter::Sum, - ops::Add, -}; - -use num_integer::Integer; -use num_traits::{ - AsPrimitive, Bounded, CheckedAdd, CheckedMul, CheckedSub, Num, One, Unsigned, WrappingAdd, - WrappingSub, Zero, -}; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -use serde::{ - de::{self, Deserialize, Deserializer, MapAccess, SeqAccess, Visitor}, - ser::{Serialize, SerializeStruct, Serializer}, -}; - -use crate::bytesrepr::{self, Error, FromBytes, ToBytes, U8_SERIALIZED_LENGTH}; - -#[allow( - clippy::assign_op_pattern, - clippy::ptr_offset_with_cast, - clippy::manual_range_contains, - clippy::range_plus_one, - clippy::transmute_ptr_to_ptr, - clippy::reversed_empty_ranges -)] -mod macro_code { - #[cfg(feature = "datasize")] - use datasize::DataSize; - use uint::construct_uint; - - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U512(8); - } - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U256(4); - } - construct_uint! { - #[cfg_attr(feature = "datasize", derive(DataSize))] - pub struct U128(2); - } -} - -pub use self::macro_code::{U128, U256, U512}; - -/// Error type for parsing [`U128`], [`U256`], [`U512`] from a string. -#[derive(Debug)] -#[non_exhaustive] -pub enum UIntParseError { - /// Contains the parsing error from the `uint` crate, which only supports base-10 parsing. - FromDecStr(uint::FromDecStrErr), - /// Parsing was attempted on a string representing the number in some base other than 10. - /// - /// Note: a general radix may be supported in the future. - InvalidRadix, -} - -macro_rules! 
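Editor's note: `TransferredTo` round-trips through an `i32` code as shown above: 0 and 1 map to the two success variants, anything else maps to `Err(ApiError::Transfer)`, and an error maps back to 2. A stand-alone sketch with a unit error standing in for `ApiError`:

```rust
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(i32)]
enum TransferredTo {
    ExistingAccount = 0,
    NewAccount = 1,
}

fn result_from(value: i32) -> Result<TransferredTo, ()> {
    match value {
        0 => Ok(TransferredTo::ExistingAccount),
        1 => Ok(TransferredTo::NewAccount),
        _ => Err(()), // `ApiError::Transfer` in the original
    }
}

fn i32_from(result: Result<TransferredTo, ()>) -> i32 {
    match result {
        Ok(transferred_to) => transferred_to as i32,
        Err(()) => 2,
    }
}

fn main() {
    assert_eq!(result_from(1), Ok(TransferredTo::NewAccount));
    assert_eq!(result_from(7), Err(()));
    assert_eq!(i32_from(result_from(0)), 0);
    assert_eq!(i32_from(Err(())), 2);
}
```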
impl_traits_for_uint { - ($type:ident, $total_bytes:expr, $test_mod:ident) => { - impl Serialize for $type { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - return self.to_string().serialize(serializer); - } - - let mut buffer = [0u8; $total_bytes]; - self.to_little_endian(&mut buffer); - let non_zero_bytes: Vec = buffer - .iter() - .rev() - .skip_while(|b| **b == 0) - .cloned() - .collect(); - let num_bytes = non_zero_bytes.len(); - - let mut state = serializer.serialize_struct("bigint", num_bytes + 1)?; - state.serialize_field("", &(num_bytes as u8))?; - - for byte in non_zero_bytes.into_iter().rev() { - state.serialize_field("", &byte)?; - } - state.end() - } - } - - impl<'de> Deserialize<'de> for $type { - fn deserialize>(deserializer: D) -> Result { - struct BigNumVisitor; - - impl<'de> Visitor<'de> for BigNumVisitor { - type Value = $type; - - fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { - formatter.write_str("bignum struct") - } - - fn visit_seq>( - self, - mut sequence: V, - ) -> Result<$type, V::Error> { - let length: u8 = sequence - .next_element()? - .ok_or_else(|| de::Error::invalid_length(0, &self))?; - let mut buffer = [0u8; $total_bytes]; - for index in 0..length as usize { - let value = sequence - .next_element()? - .ok_or_else(|| de::Error::invalid_length(index + 1, &self))?; - buffer[index as usize] = value; - } - let result = $type::from_little_endian(&buffer); - Ok(result) - } - - fn visit_map>(self, mut map: V) -> Result<$type, V::Error> { - let _length_key: u8 = map - .next_key()? - .ok_or_else(|| de::Error::missing_field("length"))?; - let length: u8 = map - .next_value() - .map_err(|_| de::Error::invalid_length(0, &self))?; - let mut buffer = [0u8; $total_bytes]; - for index in 0..length { - let _byte_key: u8 = map - .next_key()? 
- .ok_or_else(|| de::Error::missing_field("byte"))?; - let value = map.next_value().map_err(|_| { - de::Error::invalid_length(index as usize + 1, &self) - })?; - buffer[index as usize] = value; - } - let result = $type::from_little_endian(&buffer); - Ok(result) - } - } - - const FIELDS: &'static [&'static str] = &[ - "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", - "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", - "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", - "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", - "54", "55", "56", "57", "58", "59", "60", "61", "62", "63", "64", - ]; - - if deserializer.is_human_readable() { - let decimal_string = String::deserialize(deserializer)?; - return Self::from_dec_str(&decimal_string) - .map_err(|error| de::Error::custom(format!("{:?}", error))); - } - - deserializer.deserialize_struct("bigint", FIELDS, BigNumVisitor) - } - } - - impl ToBytes for $type { - fn to_bytes(&self) -> Result, Error> { - let mut buf = [0u8; $total_bytes]; - self.to_little_endian(&mut buf); - let mut non_zero_bytes: Vec = - buf.iter().rev().skip_while(|b| **b == 0).cloned().collect(); - let num_bytes = non_zero_bytes.len() as u8; - non_zero_bytes.push(num_bytes); - non_zero_bytes.reverse(); - Ok(non_zero_bytes) - } - - fn serialized_length(&self) -> usize { - let mut buf = [0u8; $total_bytes]; - self.to_little_endian(&mut buf); - let non_zero_bytes = buf.iter().rev().skip_while(|b| **b == 0).count(); - U8_SERIALIZED_LENGTH + non_zero_bytes - } - } - - impl FromBytes for $type { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (num_bytes, rem): (u8, &[u8]) = FromBytes::from_bytes(bytes)?; - - if num_bytes > $total_bytes { - Err(Error::Formatting) - } else { - let (value, rem) = bytesrepr::safe_split_at(rem, num_bytes as usize)?; - let result = $type::from_little_endian(value); - Ok((result, rem)) - } - } - } - - // Trait implementations for unifying U* as numeric types - impl Zero for $type { - fn zero() -> Self { - $type::zero() - } - - fn is_zero(&self) -> bool { - self.is_zero() - } - } - - impl One for $type { - fn one() -> Self { - $type::one() - } - } - - // Requires Zero and One to be implemented - impl Num for $type { - type FromStrRadixErr = UIntParseError; - fn from_str_radix(str: &str, radix: u32) -> Result { - if radix == 10 { - $type::from_dec_str(str).map_err(UIntParseError::FromDecStr) - } else { - // TODO: other radix parsing - Err(UIntParseError::InvalidRadix) - } - } - } - - // Requires Num to be implemented - impl Unsigned for $type {} - - // Additional numeric trait, which also holds for these types - impl Bounded for $type { - fn min_value() -> Self { - $type::zero() - } - - fn max_value() -> Self { - $type::MAX - } - } - - // Instead of implementing arbitrary methods we can use existing traits from num_trait - // crate. 
- impl WrappingAdd for $type { - fn wrapping_add(&self, other: &$type) -> $type { - self.overflowing_add(*other).0 - } - } - - impl WrappingSub for $type { - fn wrapping_sub(&self, other: &$type) -> $type { - self.overflowing_sub(*other).0 - } - } - - impl CheckedMul for $type { - fn checked_mul(&self, v: &$type) -> Option<$type> { - $type::checked_mul(*self, *v) - } - } - - impl CheckedSub for $type { - fn checked_sub(&self, v: &$type) -> Option<$type> { - $type::checked_sub(*self, *v) - } - } - - impl CheckedAdd for $type { - fn checked_add(&self, v: &$type) -> Option<$type> { - $type::checked_add(*self, *v) - } - } - - impl Integer for $type { - /// Unsigned integer division. Returns the same result as `div` (`/`). - #[inline] - fn div_floor(&self, other: &Self) -> Self { - *self / *other - } - - /// Unsigned integer modulo operation. Returns the same result as `rem` (`%`). - #[inline] - fn mod_floor(&self, other: &Self) -> Self { - *self % *other - } - - /// Calculates the Greatest Common Divisor (GCD) of the number and `other` - #[inline] - fn gcd(&self, other: &Self) -> Self { - let zero = Self::zero(); - // Use Stein's algorithm - let mut m = *self; - let mut n = *other; - if m == zero || n == zero { - return m | n; - } - - // find common factors of 2 - let shift = (m | n).trailing_zeros(); - - // divide n and m by 2 until odd - m >>= m.trailing_zeros(); - n >>= n.trailing_zeros(); - - while m != n { - if m > n { - m -= n; - m >>= m.trailing_zeros(); - } else { - n -= m; - n >>= n.trailing_zeros(); - } - } - m << shift - } - - /// Calculates the Lowest Common Multiple (LCM) of the number and `other`. - #[inline] - fn lcm(&self, other: &Self) -> Self { - self.gcd_lcm(other).1 - } - - /// Calculates the Greatest Common Divisor (GCD) and - /// Lowest Common Multiple (LCM) of the number and `other`. - #[inline] - fn gcd_lcm(&self, other: &Self) -> (Self, Self) { - if self.is_zero() && other.is_zero() { - return (Self::zero(), Self::zero()); - } - let gcd = self.gcd(other); - let lcm = *self * (*other / gcd); - (gcd, lcm) - } - - /// Deprecated, use `is_multiple_of` instead. - #[inline] - fn divides(&self, other: &Self) -> bool { - self.is_multiple_of(other) - } - - /// Returns `true` if the number is a multiple of `other`. - #[inline] - fn is_multiple_of(&self, other: &Self) -> bool { - *self % *other == $type::zero() - } - - /// Returns `true` if the number is divisible by `2`. - #[inline] - fn is_even(&self) -> bool { - (self.0[0]) & 1 == 0 - } - - /// Returns `true` if the number is not divisible by `2`. - #[inline] - fn is_odd(&self) -> bool { - !self.is_even() - } - - /// Simultaneous truncated integer division and modulus. 
- #[inline] - fn div_rem(&self, other: &Self) -> (Self, Self) { - (*self / *other, *self % *other) - } - } - - impl AsPrimitive<$type> for i32 { - fn as_(self) -> $type { - if self >= 0 { - $type::from(self as u32) - } else { - let abs = 0u32.wrapping_sub(self as u32); - $type::zero().wrapping_sub(&$type::from(abs)) - } - } - } - - impl AsPrimitive<$type> for i64 { - fn as_(self) -> $type { - if self >= 0 { - $type::from(self as u64) - } else { - let abs = 0u64.wrapping_sub(self as u64); - $type::zero().wrapping_sub(&$type::from(abs)) - } - } - } - - impl AsPrimitive<$type> for u8 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive<$type> for u32 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive<$type> for u64 { - fn as_(self) -> $type { - $type::from(self) - } - } - - impl AsPrimitive for $type { - fn as_(self) -> i32 { - self.0[0] as i32 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> i64 { - self.0[0] as i64 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u8 { - self.0[0] as u8 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u32 { - self.0[0] as u32 - } - } - - impl AsPrimitive for $type { - fn as_(self) -> u64 { - self.0[0] - } - } - - impl Sum for $type { - fn sum>(iter: I) -> Self { - iter.fold($type::zero(), Add::add) - } - } - - impl Distribution<$type> for Standard { - fn sample(&self, rng: &mut R) -> $type { - let mut raw_bytes = [0u8; $total_bytes]; - rng.fill_bytes(raw_bytes.as_mut()); - $type::from(raw_bytes) - } - } - - #[cfg(feature = "json-schema")] - impl schemars::JsonSchema for $type { - fn schema_name() -> String { - format!("U{}", $total_bytes * 8) - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some(format!( - "Decimal representation of a {}-bit integer.", - $total_bytes * 8 - )); - schema_object.into() - } - } - - #[cfg(test)] - mod $test_mod { - use super::*; - - #[test] - fn test_div_mod_floor() { - assert_eq!($type::from(10).div_floor(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(10).mod_floor(&$type::from(3)), $type::from(1)); - assert_eq!( - $type::from(10).div_mod_floor(&$type::from(3)), - ($type::from(3), $type::from(1)) - ); - assert_eq!($type::from(5).div_floor(&$type::from(5)), $type::from(1)); - assert_eq!($type::from(5).mod_floor(&$type::from(5)), $type::from(0)); - assert_eq!( - $type::from(5).div_mod_floor(&$type::from(5)), - ($type::from(1), $type::from(0)) - ); - assert_eq!($type::from(3).div_floor(&$type::from(7)), $type::from(0)); - assert_eq!($type::from(3).mod_floor(&$type::from(7)), $type::from(3)); - assert_eq!( - $type::from(3).div_mod_floor(&$type::from(7)), - ($type::from(0), $type::from(3)) - ); - } - - #[test] - fn test_gcd() { - assert_eq!($type::from(10).gcd(&$type::from(2)), $type::from(2)); - assert_eq!($type::from(10).gcd(&$type::from(3)), $type::from(1)); - assert_eq!($type::from(0).gcd(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(3).gcd(&$type::from(3)), $type::from(3)); - assert_eq!($type::from(56).gcd(&$type::from(42)), $type::from(14)); - assert_eq!( - $type::MAX.gcd(&($type::MAX / $type::from(2))), - $type::from(1) - ); - assert_eq!($type::from(15).gcd(&$type::from(17)), $type::from(1)); - } - - #[test] - fn test_lcm() { - assert_eq!($type::from(1).lcm(&$type::from(0)), $type::from(0)); - assert_eq!($type::from(0).lcm(&$type::from(1)), 
$type::from(0)); - assert_eq!($type::from(1).lcm(&$type::from(1)), $type::from(1)); - assert_eq!($type::from(8).lcm(&$type::from(9)), $type::from(72)); - assert_eq!($type::from(11).lcm(&$type::from(5)), $type::from(55)); - assert_eq!($type::from(15).lcm(&$type::from(17)), $type::from(255)); - assert_eq!($type::from(4).lcm(&$type::from(8)), $type::from(8)); - } - - #[test] - fn test_is_multiple_of() { - assert!($type::from(6).is_multiple_of(&$type::from(6))); - assert!($type::from(6).is_multiple_of(&$type::from(3))); - assert!($type::from(6).is_multiple_of(&$type::from(1))); - assert!(!$type::from(3).is_multiple_of(&$type::from(5))) - } - - #[test] - fn is_even() { - assert_eq!($type::from(0).is_even(), true); - assert_eq!($type::from(1).is_even(), false); - assert_eq!($type::from(2).is_even(), true); - assert_eq!($type::from(3).is_even(), false); - assert_eq!($type::from(4).is_even(), true); - } - - #[test] - fn is_odd() { - assert_eq!($type::from(0).is_odd(), false); - assert_eq!($type::from(1).is_odd(), true); - assert_eq!($type::from(2).is_odd(), false); - assert_eq!($type::from(3).is_odd(), true); - assert_eq!($type::from(4).is_odd(), false); - } - - #[test] - #[should_panic] - fn overflow_mul_test() { - let _ = $type::MAX * $type::from(2); - } - - #[test] - #[should_panic] - fn overflow_add_test() { - let _ = $type::MAX + $type::from(1); - } - - #[test] - #[should_panic] - fn underflow_sub_test() { - let _ = $type::zero() - $type::from(1); - } - } - }; -} - -impl_traits_for_uint!(U128, 16, u128_test); -impl_traits_for_uint!(U256, 32, u256_test); -impl_traits_for_uint!(U512, 64, u512_test); - -impl AsPrimitive for U128 { - fn as_(self) -> U128 { - self - } -} - -impl AsPrimitive for U128 { - fn as_(self) -> U256 { - let mut result = U256::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U128 { - fn as_(self) -> U512 { - let mut result = U512::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U128 { - let mut result = U128::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U256 { - self - } -} - -impl AsPrimitive for U256 { - fn as_(self) -> U512 { - let mut result = U512::zero(); - result.0[..4].clone_from_slice(&self.0[..4]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U128 { - let mut result = U128::zero(); - result.0[..2].clone_from_slice(&self.0[..2]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U256 { - let mut result = U256::zero(); - result.0[..4].clone_from_slice(&self.0[..4]); - result - } -} - -impl AsPrimitive for U512 { - fn as_(self) -> U512 { - self - } -} - -#[cfg(test)] -mod tests { - use std::fmt::Debug; - - use serde::de::DeserializeOwned; - - use super::*; - - fn check_as_i32>(expected: i32, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_i64>(expected: i64, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u8>(expected: u8, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u32>(expected: u32, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u64>(expected: u64, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u128>(expected: U128, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u256>(expected: U256, input: T) { - assert_eq!(expected, input.as_()); - } - - fn check_as_u512>(expected: U512, input: T) { - assert_eq!(expected, 
input.as_()); - } - - #[test] - fn as_primitive_from_i32() { - let mut input = 0_i32; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = i32::max_value() - 1; - check_as_i32(input, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - - input = i32::min_value() + 1; - check_as_i32(input, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - // i32::min_value() is -1 - i32::max_value() - check_as_u128( - U128::zero().wrapping_sub(&U128::from(i32::max_value())), - input, - ); - check_as_u256( - U256::zero().wrapping_sub(&U256::from(i32::max_value())), - input, - ); - check_as_u512( - U512::zero().wrapping_sub(&U512::from(i32::max_value())), - input, - ); - } - - #[test] - fn as_primitive_from_i64() { - let mut input = 0_i64; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = i64::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(input, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - - input = i64::min_value() + 1; - check_as_i32(input as i32, input); - check_as_i64(input, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input as u64, input); - // i64::min_value() is (-1 - i64::max_value()) - check_as_u128( - U128::zero().wrapping_sub(&U128::from(i64::max_value())), - input, - ); - check_as_u256( - U256::zero().wrapping_sub(&U256::from(i64::max_value())), - input, - ); - check_as_u512( - U512::zero().wrapping_sub(&U512::from(i64::max_value())), - input, - ); - } - - #[test] - fn as_primitive_from_u8() { - let mut input = 0_u8; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u8::max_value() - 1; - check_as_i32(i32::from(input), input); - check_as_i64(i64::from(input), input); - check_as_u8(input, input); - check_as_u32(u32::from(input), input); - check_as_u64(u64::from(input), input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - #[test] - fn as_primitive_from_u32() { - let mut input = 0_u32; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u32::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(i64::from(input), input); - check_as_u8(input as u8, input); - check_as_u32(input, input); - 
check_as_u64(u64::from(input), input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - #[test] - fn as_primitive_from_u64() { - let mut input = 0_u64; - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = u64::max_value() - 1; - check_as_i32(input as i32, input); - check_as_i64(input as i64, input); - check_as_u8(input as u8, input); - check_as_u32(input as u32, input); - check_as_u64(input, input); - check_as_u128(U128::from(input), input); - check_as_u256(U256::from(input), input); - check_as_u512(U512::from(input), input); - } - - fn make_little_endian_arrays(little_endian_bytes: &[u8]) -> ([u8; 4], [u8; 8]) { - let le_32 = { - let mut le_32 = [0; 4]; - le_32.copy_from_slice(&little_endian_bytes[..4]); - le_32 - }; - - let le_64 = { - let mut le_64 = [0; 8]; - le_64.copy_from_slice(&little_endian_bytes[..8]); - le_64 - }; - - (le_32, le_64) - } - - #[test] - fn as_primitive_from_u128() { - let mut input = U128::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U128::max_value() - 1; - - let mut little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes[..16]); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn as_primitive_from_u256() { - let mut input = U256::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U256::max_value() - 1; - - let mut little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes[..32]); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn as_primitive_from_u512() { - let mut input = U512::zero(); - check_as_i32(0, input); - check_as_i64(0, input); - check_as_u8(0, input); - check_as_u32(0, input); - check_as_u64(0, input); - check_as_u128(U128::zero(), input); - check_as_u256(U256::zero(), input); - check_as_u512(U512::zero(), input); - - input = U512::max_value() - 1; - - let mut 
little_endian_bytes = [0_u8; 64]; - input.to_little_endian(&mut little_endian_bytes); - let (le_32, le_64) = make_little_endian_arrays(&little_endian_bytes); - - check_as_i32(i32::from_le_bytes(le_32), input); - check_as_i64(i64::from_le_bytes(le_64), input); - check_as_u8(little_endian_bytes[0], input); - check_as_u32(u32::from_le_bytes(le_32), input); - check_as_u64(u64::from_le_bytes(le_64), input); - check_as_u128(U128::from_little_endian(&little_endian_bytes[..16]), input); - check_as_u256(U256::from_little_endian(&little_endian_bytes[..32]), input); - check_as_u512(U512::from_little_endian(&little_endian_bytes), input); - } - - #[test] - fn wrapping_test_u512() { - let max = U512::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U512::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U512::max_value()); - } - - #[test] - fn wrapping_test_u256() { - let max = U256::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U256::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U256::max_value()); - } - - #[test] - fn wrapping_test_u128() { - let max = U128::max_value(); - let value = max.wrapping_add(&1.into()); - assert_eq!(value, 0.into()); - - let min = U128::min_value(); - let value = min.wrapping_sub(&1.into()); - assert_eq!(value, U128::max_value()); - } - - fn serde_roundtrip(value: T) { - { - let serialized = bincode::serialize(&value).unwrap(); - let deserialized = bincode::deserialize(serialized.as_slice()).unwrap(); - assert_eq!(value, deserialized); - } - { - let serialized = serde_json::to_string_pretty(&value).unwrap(); - let deserialized = serde_json::from_str(&serialized).unwrap(); - assert_eq!(value, deserialized); - } - } - - #[test] - fn serde_roundtrip_u512() { - serde_roundtrip(U512::min_value()); - serde_roundtrip(U512::from(1)); - serde_roundtrip(U512::from(u64::max_value())); - serde_roundtrip(U512::max_value()); - } - - #[test] - fn serde_roundtrip_u256() { - serde_roundtrip(U256::min_value()); - serde_roundtrip(U256::from(1)); - serde_roundtrip(U256::from(u64::max_value())); - serde_roundtrip(U256::max_value()); - } - - #[test] - fn serde_roundtrip_u128() { - serde_roundtrip(U128::min_value()); - serde_roundtrip(U128::from(1)); - serde_roundtrip(U128::from(u64::max_value())); - serde_roundtrip(U128::max_value()); - } -} diff --git a/casper_types_ver_2_0/src/uref.rs b/casper_types_ver_2_0/src/uref.rs deleted file mode 100644 index c24b2e85..00000000 --- a/casper_types_ver_2_0/src/uref.rs +++ /dev/null @@ -1,424 +0,0 @@ -use alloc::{format, string::String, vec::Vec}; -use core::{ - array::TryFromSliceError, - convert::TryFrom, - fmt::{self, Debug, Display, Formatter}, - num::ParseIntError, -}; - -#[cfg(feature = "datasize")] -use datasize::DataSize; -use rand::{ - distributions::{Distribution, Standard}, - Rng, -}; -#[cfg(feature = "json-schema")] -use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::{ - bytesrepr, - bytesrepr::{Error, FromBytes}, - checksummed_hex, AccessRights, ApiError, Key, ACCESS_RIGHTS_SERIALIZED_LENGTH, -}; - -/// The number of bytes in a [`URef`] address. -pub const UREF_ADDR_LENGTH: usize = 32; - -/// The number of bytes in a serialized [`URef`] where the [`AccessRights`] are not `None`. 
-pub const UREF_SERIALIZED_LENGTH: usize = UREF_ADDR_LENGTH + ACCESS_RIGHTS_SERIALIZED_LENGTH; - -pub(super) const UREF_FORMATTED_STRING_PREFIX: &str = "uref-"; - -/// The address of a `URef` (unforgeable reference) on the network. -pub type URefAddr = [u8; UREF_ADDR_LENGTH]; - -/// Error while parsing a URef from a formatted string. -#[derive(Debug)] -#[non_exhaustive] -pub enum FromStrError { - /// Prefix is not "uref-". - InvalidPrefix, - /// No access rights as suffix. - MissingSuffix, - /// Access rights are invalid. - InvalidAccessRights, - /// Failed to decode address portion of URef. - Hex(base16::DecodeError), - /// Failed to parse an int. - Int(ParseIntError), - /// The address portion is the wrong length. - Address(TryFromSliceError), -} - -impl From for FromStrError { - fn from(error: base16::DecodeError) -> Self { - FromStrError::Hex(error) - } -} - -impl From for FromStrError { - fn from(error: ParseIntError) -> Self { - FromStrError::Int(error) - } -} - -impl From for FromStrError { - fn from(error: TryFromSliceError) -> Self { - FromStrError::Address(error) - } -} - -impl Display for FromStrError { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - FromStrError::InvalidPrefix => write!(f, "prefix is not 'uref-'"), - FromStrError::MissingSuffix => write!(f, "no access rights as suffix"), - FromStrError::InvalidAccessRights => write!(f, "invalid access rights"), - FromStrError::Hex(error) => { - write!(f, "failed to decode address portion from hex: {}", error) - } - FromStrError::Int(error) => write!(f, "failed to parse an int: {}", error), - FromStrError::Address(error) => { - write!(f, "address portion is the wrong length: {}", error) - } - } - } -} - -/// Represents an unforgeable reference, containing an address in the network's global storage and -/// the [`AccessRights`] of the reference. -/// -/// A `URef` can be used to index entities such as [`CLValue`](crate::CLValue)s, or smart contracts. -#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub struct URef(URefAddr, AccessRights); - -impl URef { - /// Constructs a [`URef`] from an address and access rights. - pub const fn new(address: URefAddr, access_rights: AccessRights) -> Self { - URef(address, access_rights) - } - - /// Returns the address of this [`URef`]. - pub fn addr(&self) -> URefAddr { - self.0 - } - - /// Returns the access rights of this [`URef`]. - pub fn access_rights(&self) -> AccessRights { - self.1 - } - - /// Returns a new [`URef`] with the same address and updated access rights. - #[must_use] - pub fn with_access_rights(self, access_rights: AccessRights) -> Self { - URef(self.0, access_rights) - } - - /// Removes the access rights from this [`URef`]. - #[must_use] - pub fn remove_access_rights(self) -> Self { - URef(self.0, AccessRights::NONE) - } - - /// Returns `true` if the access rights are `Some` and - /// [`is_readable`](AccessRights::is_readable) is `true` for them. - #[must_use] - pub fn is_readable(self) -> bool { - self.1.is_readable() - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ`] permission. - #[must_use] - pub fn into_read(self) -> URef { - URef(self.0, AccessRights::READ) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::WRITE`] permission. - #[must_use] - pub fn into_write(self) -> URef { - URef(self.0, AccessRights::WRITE) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::ADD`] permission. 
- #[must_use] - pub fn into_add(self) -> URef { - URef(self.0, AccessRights::ADD) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ_ADD_WRITE`] - /// permission. - #[must_use] - pub fn into_read_add_write(self) -> URef { - URef(self.0, AccessRights::READ_ADD_WRITE) - } - - /// Returns a new [`URef`] with the same address and [`AccessRights::READ_WRITE`] - /// permission. - #[must_use] - pub fn into_read_write(self) -> URef { - URef(self.0, AccessRights::READ_WRITE) - } - - /// Returns `true` if the access rights are `Some` and - /// [`is_writeable`](AccessRights::is_writeable) is `true` for them. - pub fn is_writeable(self) -> bool { - self.1.is_writeable() - } - - /// Returns `true` if the access rights are `Some` and [`is_addable`](AccessRights::is_addable) - /// is `true` for them. - pub fn is_addable(self) -> bool { - self.1.is_addable() - } - - /// Formats the address and access rights of the [`URef`] in a unique way that could be used as - /// a name when storing the given `URef` in a global state. - pub fn to_formatted_string(self) -> String { - // Extract bits as numerical value, with no flags marked as 0. - let access_rights_bits = self.access_rights().bits(); - // Access rights is represented as octal, which means that max value of u8 can - // be represented as maximum of 3 octal digits. - format!( - "{}{}-{:03o}", - UREF_FORMATTED_STRING_PREFIX, - base16::encode_lower(&self.addr()), - access_rights_bits - ) - } - - /// Parses a string formatted as per `Self::to_formatted_string()` into a `URef`. - pub fn from_formatted_str(input: &str) -> Result { - let remainder = input - .strip_prefix(UREF_FORMATTED_STRING_PREFIX) - .ok_or(FromStrError::InvalidPrefix)?; - let parts = remainder.splitn(2, '-').collect::>(); - if parts.len() != 2 { - return Err(FromStrError::MissingSuffix); - } - let addr = URefAddr::try_from(checksummed_hex::decode(parts[0])?.as_ref())?; - let access_rights_value = u8::from_str_radix(parts[1], 8)?; - let access_rights = AccessRights::from_bits(access_rights_value) - .ok_or(FromStrError::InvalidAccessRights)?; - Ok(URef(addr, access_rights)) - } - - /// Removes specific access rights from this URef if present. 
- pub fn disable_access_rights(&mut self, access_rights: AccessRights) { - self.1.remove(access_rights) - } -} - -#[cfg(feature = "json-schema")] -impl JsonSchema for URef { - fn schema_name() -> String { - String::from("URef") - } - - fn json_schema(gen: &mut SchemaGenerator) -> Schema { - let schema = gen.subschema_for::(); - let mut schema_object = schema.into_object(); - schema_object.metadata().description = Some(String::from("Hex-encoded, formatted URef.")); - schema_object.into() - } -} - -impl Display for URef { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - let addr = self.addr(); - let access_rights = self.access_rights(); - write!( - f, - "URef({}, {})", - base16::encode_lower(&addr), - access_rights - ) - } -} - -impl Debug for URef { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", self) - } -} - -impl bytesrepr::ToBytes for URef { - fn to_bytes(&self) -> Result, Error> { - let mut result = bytesrepr::unchecked_allocate_buffer(self); - result.append(&mut self.0.to_bytes()?); - result.append(&mut self.1.to_bytes()?); - Ok(result) - } - - fn serialized_length(&self) -> usize { - UREF_SERIALIZED_LENGTH - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), self::Error> { - writer.extend_from_slice(&self.0); - self.1.write_bytes(writer)?; - Ok(()) - } -} - -impl FromBytes for URef { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (id, rem) = FromBytes::from_bytes(bytes)?; - let (access_rights, rem) = FromBytes::from_bytes(rem)?; - Ok((URef(id, access_rights), rem)) - } -} - -impl Serialize for URef { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - self.to_formatted_string().serialize(serializer) - } else { - (self.0, self.1).serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for URef { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let formatted_string = String::deserialize(deserializer)?; - URef::from_formatted_str(&formatted_string).map_err(D::Error::custom) - } else { - let (address, access_rights) = <(URefAddr, AccessRights)>::deserialize(deserializer)?; - Ok(URef(address, access_rights)) - } - } -} - -impl TryFrom for URef { - type Error = ApiError; - - fn try_from(key: Key) -> Result { - if let Key::URef(uref) = key { - Ok(uref) - } else { - Err(ApiError::UnexpectedKeyVariant) - } - } -} - -impl Distribution for Standard { - fn sample(&self, rng: &mut R) -> URef { - URef::new(rng.gen(), rng.gen()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn uref_as_string() { - // Since we are putting URefs to named_keys map keyed by the label that - // `as_string()` returns, any changes to the string representation of - // that type cannot break the format. 
- let addr_array = [0u8; 32]; - let uref_a = URef::new(addr_array, AccessRights::READ); - assert_eq!( - uref_a.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-001" - ); - let uref_b = URef::new(addr_array, AccessRights::WRITE); - assert_eq!( - uref_b.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-002" - ); - - let uref_c = uref_b.remove_access_rights(); - assert_eq!( - uref_c.to_formatted_string(), - "uref-0000000000000000000000000000000000000000000000000000000000000000-000" - ); - } - - fn round_trip(uref: URef) { - let string = uref.to_formatted_string(); - let parsed_uref = URef::from_formatted_str(&string).unwrap(); - assert_eq!(uref, parsed_uref); - } - - #[test] - fn uref_from_str() { - round_trip(URef::new([0; 32], AccessRights::NONE)); - round_trip(URef::new([255; 32], AccessRights::READ_ADD_WRITE)); - - let invalid_prefix = - "ref-0000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(invalid_prefix).is_err()); - - let invalid_prefix = - "uref0000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(invalid_prefix).is_err()); - - let short_addr = "uref-00000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(short_addr).is_err()); - - let long_addr = - "uref-000000000000000000000000000000000000000000000000000000000000000000-000"; - assert!(URef::from_formatted_str(long_addr).is_err()); - - let invalid_hex = - "uref-000000000000000000000000000000000000000000000000000000000000000g-000"; - assert!(URef::from_formatted_str(invalid_hex).is_err()); - - let invalid_suffix_separator = - "uref-0000000000000000000000000000000000000000000000000000000000000000:000"; - assert!(URef::from_formatted_str(invalid_suffix_separator).is_err()); - - let invalid_suffix = - "uref-0000000000000000000000000000000000000000000000000000000000000000-abc"; - assert!(URef::from_formatted_str(invalid_suffix).is_err()); - - let invalid_access_rights = - "uref-0000000000000000000000000000000000000000000000000000000000000000-200"; - assert!(URef::from_formatted_str(invalid_access_rights).is_err()); - } - - #[test] - fn serde_roundtrip() { - let uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - let serialized = bincode::serialize(&uref).unwrap(); - let decoded = bincode::deserialize(&serialized).unwrap(); - assert_eq!(uref, decoded); - } - - #[test] - fn json_roundtrip() { - let uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - let json_string = serde_json::to_string_pretty(&uref).unwrap(); - let decoded = serde_json::from_str(&json_string).unwrap(); - assert_eq!(uref, decoded); - } - - #[test] - fn should_disable_access_rights() { - let mut uref = URef::new([255; 32], AccessRights::READ_ADD_WRITE); - assert!(uref.is_writeable()); - uref.disable_access_rights(AccessRights::WRITE); - assert_eq!(uref.access_rights(), AccessRights::READ_ADD); - - uref.disable_access_rights(AccessRights::WRITE); - assert!( - !uref.is_writeable(), - "Disabling access bit twice should be a noop" - ); - - assert_eq!(uref.access_rights(), AccessRights::READ_ADD); - - uref.disable_access_rights(AccessRights::READ_ADD); - assert_eq!(uref.access_rights(), AccessRights::NONE); - - uref.disable_access_rights(AccessRights::READ_ADD); - assert_eq!(uref.access_rights(), AccessRights::NONE); - - uref.disable_access_rights(AccessRights::NONE); - assert_eq!(uref.access_rights(), 
AccessRights::NONE); - } -} diff --git a/casper_types_ver_2_0/src/validator_change.rs b/casper_types_ver_2_0/src/validator_change.rs deleted file mode 100644 index 92b66f8d..00000000 --- a/casper_types_ver_2_0/src/validator_change.rs +++ /dev/null @@ -1,101 +0,0 @@ -use crate::bytesrepr::{self, FromBytes, ToBytes}; -#[cfg(any(feature = "testing", test))] -use crate::testing::TestRng; -use alloc::vec::Vec; -#[cfg(feature = "datasize")] -use datasize::DataSize; -#[cfg(feature = "json-schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -/// A change to a validator's status between two eras. -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Ord, PartialOrd)] -#[cfg_attr(feature = "json-schema", derive(JsonSchema))] -#[cfg_attr(feature = "datasize", derive(DataSize))] -pub enum ValidatorChange { - /// The validator got newly added to the validator set. - Added, - /// The validator was removed from the validator set. - Removed, - /// The validator was banned from this era. - Banned, - /// The validator was excluded from proposing new blocks in this era. - CannotPropose, - /// We saw the validator misbehave in this era. - SeenAsFaulty, -} - -impl ValidatorChange { - /// Returns a random `ValidatorChange`. - #[cfg(any(feature = "testing", test))] - pub fn random(rng: &mut TestRng) -> Self { - use rand::Rng; - - match rng.gen_range(0..5) { - ADDED_TAG => ValidatorChange::Added, - REMOVED_TAG => ValidatorChange::Removed, - BANNED_TAG => ValidatorChange::Banned, - CANNOT_PROPOSE_TAG => ValidatorChange::CannotPropose, - SEEN_AS_FAULTY_TAG => ValidatorChange::SeenAsFaulty, - _ => unreachable!(), - } - } -} - -const ADDED_TAG: u8 = 0; -const REMOVED_TAG: u8 = 1; -const BANNED_TAG: u8 = 2; -const CANNOT_PROPOSE_TAG: u8 = 3; -const SEEN_AS_FAULTY_TAG: u8 = 4; - -impl ToBytes for ValidatorChange { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - self.write_bytes(&mut buffer)?; - Ok(buffer) - } - - fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { - match self { - ValidatorChange::Added => ADDED_TAG, - ValidatorChange::Removed => REMOVED_TAG, - ValidatorChange::Banned => BANNED_TAG, - ValidatorChange::CannotPropose => CANNOT_PROPOSE_TAG, - ValidatorChange::SeenAsFaulty => SEEN_AS_FAULTY_TAG, - } - .write_bytes(writer) - } - - fn serialized_length(&self) -> usize { - bytesrepr::U8_SERIALIZED_LENGTH - } -} - -impl FromBytes for ValidatorChange { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { - let (tag, remainder) = u8::from_bytes(bytes)?; - let id = match tag { - ADDED_TAG => ValidatorChange::Added, - REMOVED_TAG => ValidatorChange::Removed, - BANNED_TAG => ValidatorChange::Banned, - CANNOT_PROPOSE_TAG => ValidatorChange::CannotPropose, - SEEN_AS_FAULTY_TAG => ValidatorChange::SeenAsFaulty, - _ => return Err(bytesrepr::Error::NotRepresentable), - }; - Ok((id, remainder)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::testing::TestRng; - - #[test] - fn bytesrepr_roundtrip() { - let rng = &mut TestRng::new(); - - let val = ValidatorChange::random(rng); - bytesrepr::test_serialization_roundtrip(&val); - } -} diff --git a/casper_types_ver_2_0/tests/version_numbers.rs b/casper_types_ver_2_0/tests/version_numbers.rs deleted file mode 100644 index 5787cf50..00000000 --- a/casper_types_ver_2_0/tests/version_numbers.rs +++ /dev/null @@ -1,5 +0,0 @@ -#[cfg(feature = "version-sync")] -#[test] -fn test_html_root_url() { - 
version_sync::assert_html_root_url_updated!("src/lib.rs"); -} diff --git a/event_sidecar/src/database/postgresql_database/tests.rs b/event_sidecar/src/database/postgresql_database/tests.rs index a355b927..13336aba 100644 --- a/event_sidecar/src/database/postgresql_database/tests.rs +++ b/event_sidecar/src/database/postgresql_database/tests.rs @@ -37,43 +37,51 @@ async fn should_save_and_retrieve_block_added() { } #[tokio::test] -async fn should_save_and_retrieve_deploy_accepted() { +async fn should_save_and_retrieve_transaction_accepted() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_save_and_retrieve_deploy_accepted(test_context.db.clone()).await; + crate::database::tests::should_save_and_retrieve_transaction_accepted(test_context.db.clone()) + .await; } #[tokio::test] -async fn should_save_and_retrieve_deploy_processed() { +async fn should_save_and_retrieve_transaction_processed() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_save_and_retrieve_deploy_processed(test_context.db.clone()) + crate::database::tests::should_save_and_retrieve_transaction_processed(test_context.db.clone()) .await; } #[tokio::test] -async fn should_save_and_retrieve_deploy_expired() { +async fn should_save_and_retrieve_transaction_expired() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_save_and_retrieve_deploy_expired(test_context.db.clone()).await; + crate::database::tests::should_save_and_retrieve_transaction_expired(test_context.db.clone()) + .await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_accepted() { +async fn should_retrieve_transaction_aggregate_of_accepted() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_retrieve_deploy_aggregate_of_accepted(test_context.db.clone()) - .await; + crate::database::tests::should_retrieve_transaction_aggregate_of_accepted( + test_context.db.clone(), + ) + .await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_processed() { +async fn should_retrieve_transaction_aggregate_of_processed() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_retrieve_deploy_aggregate_of_processed(test_context.db.clone()) - .await; + crate::database::tests::should_retrieve_transaction_aggregate_of_processed( + test_context.db.clone(), + ) + .await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_expired() { +async fn should_retrieve_transaction_aggregate_of_expired() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_retrieve_deploy_aggregate_of_expired(test_context.db.clone()) - .await; + crate::database::tests::should_retrieve_transaction_aggregate_of_expired( + test_context.db.clone(), + ) + .await; } #[tokio::test] @@ -126,27 +134,27 @@ async fn should_disallow_insert_of_existing_block_added() { } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_accepted() { +async fn should_disallow_insert_of_existing_transaction_accepted() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_disallow_insert_of_existing_deploy_accepted( + crate::database::tests::should_disallow_insert_of_existing_transaction_accepted( test_context.db.clone(), ) .await; } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_expired() { +async fn should_disallow_insert_of_existing_transaction_expired() { let test_context = 
build_postgres_database().await.unwrap(); - crate::database::tests::should_disallow_insert_of_existing_deploy_expired( + crate::database::tests::should_disallow_insert_of_existing_transaction_expired( test_context.db.clone(), ) .await; } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_processed() { +async fn should_disallow_insert_of_existing_transaction_processed() { let test_context = build_postgres_database().await.unwrap(); - crate::database::tests::should_disallow_insert_of_existing_deploy_processed( + crate::database::tests::should_disallow_insert_of_existing_transaction_processed( test_context.db.clone(), ) .await; @@ -207,17 +215,17 @@ async fn should_save_block_added_with_correct_event_type_id() { } #[tokio::test] -async fn should_save_deploy_accepted_with_correct_event_type_id() { +async fn should_save_transaction_accepted_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let test_context = build_postgres_database().await.unwrap(); let db = &test_context.db; - let deploy_accepted = DeployAccepted::random(&mut test_rng); + let transaction_accepted = TransactionAccepted::random(&mut test_rng); assert!(db - .save_deploy_accepted( - deploy_accepted, + .save_transaction_accepted( + transaction_accepted, 1, "127.0.0.1".to_string(), "1.5.5".to_string() @@ -240,22 +248,22 @@ async fn should_save_deploy_accepted_with_correct_event_type_id() { .try_get::(1) .expect("Error getting api_version from row"); - assert_eq!(event_type_id, EventTypeId::DeployAccepted as i16); + assert_eq!(event_type_id, EventTypeId::TransactionAccepted as i16); assert_eq!(api_version, "1.5.5".to_string()); } #[tokio::test] -async fn should_save_deploy_processed_with_correct_event_type_id() { +async fn should_save_transaction_processed_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let test_context = build_postgres_database().await.unwrap(); let db = &test_context.db; - let deploy_processed = DeployProcessed::random(&mut test_rng, None); + let transaction_processed = TransactionProcessed::random(&mut test_rng, None); assert!(db - .save_deploy_processed( - deploy_processed, + .save_transaction_processed( + transaction_processed, 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -275,21 +283,21 @@ async fn should_save_deploy_processed_with_correct_event_type_id() { .try_get::(0) .expect("Error getting event_type_id from row"); - assert_eq!(event_type_id, EventTypeId::DeployProcessed as i16) + assert_eq!(event_type_id, EventTypeId::TransactionProcessed as i16) } #[tokio::test] -async fn should_save_deploy_expired_with_correct_event_type_id() { +async fn should_save_transaction_expired_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let test_context = build_postgres_database().await.unwrap(); let db = &test_context.db; - let deploy_expired = DeployExpired::random(&mut test_rng, None); + let transaction_expired = TransactionExpired::random(&mut test_rng, None); assert!(db - .save_deploy_expired( - deploy_expired, + .save_transaction_expired( + transaction_expired, 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -309,7 +317,7 @@ async fn should_save_deploy_expired_with_correct_event_type_id() { .try_get::(0) .expect("Error getting event_type_id from row"); - assert_eq!(event_type_id, EventTypeId::DeployExpired as i16) + assert_eq!(event_type_id, EventTypeId::TransactionExpired as i16) } #[tokio::test] diff --git a/event_sidecar/src/database/reader_generator.rs b/event_sidecar/src/database/reader_generator.rs index 53d63a14..a9cbf79f 100644 --- 
a/event_sidecar/src/database/reader_generator.rs +++ b/event_sidecar/src/database/reader_generator.rs @@ -5,14 +5,16 @@ macro_rules! database_reader_implementation { $query_materializer_expr:expr) => { use anyhow::Error; use async_trait::async_trait; - use casper_event_types::FinalitySignature as FinSig; + use casper_types::FinalitySignature as FinSig; use serde::Deserialize; use sqlx::{Executor, Row}; use $crate::{ database::errors::{wrap_query_error, DbError}, sql::tables, types::{ - database::{DatabaseReadError, DatabaseReader, DeployAggregate}, + database::{ + DatabaseReadError, DatabaseReader, TransactionAggregate, TransactionTypeId, + }, sse_events::*, }, }; @@ -59,43 +61,52 @@ macro_rules! database_reader_implementation { }) } - async fn get_deploy_aggregate_by_hash( + async fn get_transaction_aggregate_by_identifier( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { // We may return here with NotFound because if there's no accepted record then theoretically there should be no other records for the given hash. - let deploy_accepted = self.get_deploy_accepted_by_hash(hash).await?; - - // However we handle the Err case for DeployProcessed explicitly as we don't want to return NotFound when we've got a DeployAccepted to return - match self.get_deploy_processed_by_hash(hash).await { - Ok(deploy_processed) => Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: Some(deploy_processed), - deploy_expired: false, + let transaction_accepted = self + .get_transaction_accepted_by_hash(transaction_type.clone(), hash) + .await?; + + // However we handle the Err case for TransactionProcessed explicitly as we don't want to return NotFound when we've got a TransactionAccepted to return + match self + .get_transaction_processed_by_hash(transaction_type, hash) + .await + { + Ok(transaction_processed) => Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: Some(transaction_processed), + transaction_expired: false, }), Err(err) => { // If the error is anything other than NotFound return the error. if !matches!(DatabaseReadError::NotFound, _err) { return Err(err); } - match self.get_deploy_expired_by_hash(hash).await { - Ok(_) => Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: None, - deploy_expired: true, + match self + .get_transaction_expired_by_hash(transaction_type, hash) + .await + { + Ok(_) => Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: None, + transaction_expired: true, }), Err(err) => { // If the error is anything other than NotFound return the error. if !matches!(DatabaseReadError::NotFound, _err) { return Err(err); } - Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: None, - deploy_expired: false, + Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: None, + transaction_expired: false, }) } } @@ -103,14 +114,18 @@ macro_rules! 
database_reader_implementation { } } - async fn get_deploy_accepted_by_hash( + async fn get_transaction_accepted_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let db_connection = &self.connection_pool; - let stmt = tables::deploy_accepted::create_get_by_hash_stmt(hash.to_string()) - .to_string($query_materializer_expr); + let stmt = tables::transaction_accepted::create_get_by_hash_stmt( + transaction_type.into(), + hash.to_string(), + ) + .to_string($query_materializer_expr); db_connection .fetch_optional(stmt.as_str()) @@ -122,19 +137,23 @@ macro_rules! database_reader_implementation { let raw = row .try_get::("raw") .map_err(|error| wrap_query_error(error.into()))?; - deserialize_data::(&raw).map_err(wrap_query_error) + deserialize_data::(&raw).map_err(wrap_query_error) } }) } - async fn get_deploy_processed_by_hash( + async fn get_transaction_processed_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let db_connection = &self.connection_pool; - let stmt = tables::deploy_processed::create_get_by_hash_stmt(hash.to_string()) - .to_string($query_materializer_expr); + let stmt = tables::transaction_processed::create_get_by_hash_stmt( + transaction_type.into(), + hash.to_string(), + ) + .to_string($query_materializer_expr); db_connection .fetch_optional(stmt.as_str()) @@ -146,19 +165,23 @@ macro_rules! database_reader_implementation { let raw = row .try_get::("raw") .map_err(|sqlx_error| wrap_query_error(sqlx_error.into()))?; - deserialize_data::(&raw).map_err(wrap_query_error) + deserialize_data::(&raw).map_err(wrap_query_error) } }) } - async fn get_deploy_expired_by_hash( + async fn get_transaction_expired_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let db_connection = &self.connection_pool; - let stmt = tables::deploy_expired::create_get_by_hash_stmt(hash.to_string()) - .to_string($query_materializer_expr); + let stmt = tables::transaction_expired::create_get_by_hash_stmt( + transaction_type.into(), + hash.to_string(), + ) + .to_string($query_materializer_expr); db_connection .fetch_optional(stmt.as_str()) @@ -170,7 +193,7 @@ macro_rules! 
database_reader_implementation { let raw = row .try_get::("raw") .map_err(|sqlx_error| wrap_query_error(sqlx_error.into()))?; - deserialize_data::(&raw).map_err(wrap_query_error) + deserialize_data::(&raw).map_err(wrap_query_error) } }) } diff --git a/event_sidecar/src/database/sqlite_database/tests.rs b/event_sidecar/src/database/sqlite_database/tests.rs index 1a0aa598..fb946ab6 100644 --- a/event_sidecar/src/database/sqlite_database/tests.rs +++ b/event_sidecar/src/database/sqlite_database/tests.rs @@ -50,39 +50,39 @@ async fn should_save_and_retrieve_block_added() { } #[tokio::test] -async fn should_save_and_retrieve_deploy_accepted() { +async fn should_save_and_retrieve_transaction_accepted() { let sqlite_db = build_database().await; - crate::database::tests::should_save_and_retrieve_deploy_accepted(sqlite_db).await; + crate::database::tests::should_save_and_retrieve_transaction_accepted(sqlite_db).await; } #[tokio::test] -async fn should_save_and_retrieve_deploy_processed() { +async fn should_save_and_retrieve_transaction_processed() { let sqlite_db = build_database().await; - crate::database::tests::should_save_and_retrieve_deploy_processed(sqlite_db).await; + crate::database::tests::should_save_and_retrieve_transaction_processed(sqlite_db).await; } #[tokio::test] -async fn should_save_and_retrieve_deploy_expired() { +async fn should_save_and_retrieve_transaction_expired() { let sqlite_db = build_database().await; - crate::database::tests::should_save_and_retrieve_deploy_expired(sqlite_db).await; + crate::database::tests::should_save_and_retrieve_transaction_expired(sqlite_db).await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_accepted() { +async fn should_retrieve_transaction_aggregate_of_accepted() { let sqlite_db = build_database().await; - crate::database::tests::should_retrieve_deploy_aggregate_of_accepted(sqlite_db).await; + crate::database::tests::should_retrieve_transaction_aggregate_of_accepted(sqlite_db).await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_processed() { +async fn should_retrieve_transaction_aggregate_of_processed() { let sqlite_db = build_database().await; - crate::database::tests::should_retrieve_deploy_aggregate_of_processed(sqlite_db).await; + crate::database::tests::should_retrieve_transaction_aggregate_of_processed(sqlite_db).await; } #[tokio::test] -async fn should_retrieve_deploy_aggregate_of_expired() { +async fn should_retrieve_transaction_aggregate_of_expired() { let sqlite_db = build_database().await; - crate::database::tests::should_retrieve_deploy_aggregate_of_expired(sqlite_db).await; + crate::database::tests::should_retrieve_transaction_aggregate_of_expired(sqlite_db).await; } #[tokio::test] @@ -128,21 +128,23 @@ async fn should_disallow_insert_of_existing_block_added() { } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_accepted() { +async fn should_disallow_insert_of_existing_transaction_accepted() { let sqlite_db = build_database().await; - crate::database::tests::should_disallow_insert_of_existing_deploy_accepted(sqlite_db).await; + crate::database::tests::should_disallow_insert_of_existing_transaction_accepted(sqlite_db) + .await; } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_expired() { +async fn should_disallow_insert_of_existing_transaction_expired() { let sqlite_db = build_database().await; - crate::database::tests::should_disallow_insert_of_existing_deploy_expired(sqlite_db).await; + 
crate::database::tests::should_disallow_insert_of_existing_transaction_expired(sqlite_db).await; } #[tokio::test] -async fn should_disallow_insert_of_existing_deploy_processed() { +async fn should_disallow_insert_of_existing_transaction_processed() { let sqlite_db = build_database().await; - crate::database::tests::should_disallow_insert_of_existing_deploy_processed(sqlite_db).await; + crate::database::tests::should_disallow_insert_of_existing_transaction_processed(sqlite_db) + .await; } #[tokio::test] @@ -192,16 +194,16 @@ async fn should_save_block_added_with_correct_event_type_id() { } #[tokio::test] -async fn should_save_deploy_accepted_with_correct_event_type_id() { +async fn should_save_transaction_accepted_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let sqlite_db = build_database().await; - let deploy_accepted = DeployAccepted::random(&mut test_rng); + let transaction_accepted = TransactionAccepted::random(&mut test_rng); assert!(sqlite_db - .save_deploy_accepted( - deploy_accepted, + .save_transaction_accepted( + transaction_accepted, 1, "127.0.0.1".to_string(), "1.5.5".to_string() @@ -224,21 +226,21 @@ async fn should_save_deploy_accepted_with_correct_event_type_id() { .try_get::(1) .expect("Error getting api_version from row"); - assert_eq!(event_type_id, EventTypeId::DeployAccepted as i16); + assert_eq!(event_type_id, EventTypeId::TransactionAccepted as i16); assert_eq!(api_version, "1.5.5".to_string()); } #[tokio::test] -async fn should_save_deploy_processed_with_correct_event_type_id() { +async fn should_save_transaction_processed_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let sqlite_db = build_database().await; - let deploy_processed = DeployProcessed::random(&mut test_rng, None); + let transaction_processed = TransactionProcessed::random(&mut test_rng, None); assert!(sqlite_db - .save_deploy_processed( - deploy_processed, + .save_transaction_processed( + transaction_processed, 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -258,20 +260,20 @@ async fn should_save_deploy_processed_with_correct_event_type_id() { .try_get::(0) .expect("Error getting event_type_id from row"); - assert_eq!(event_type_id, EventTypeId::DeployProcessed as i16) + assert_eq!(event_type_id, EventTypeId::TransactionProcessed as i16) } #[tokio::test] -async fn should_save_deploy_expired_with_correct_event_type_id() { +async fn should_save_transaction_expired_with_correct_event_type_id() { let mut test_rng = TestRng::new(); let sqlite_db = build_database().await; - let deploy_expired = DeployExpired::random(&mut test_rng, None); + let transaction_expired = TransactionExpired::random(&mut test_rng, None); assert!(sqlite_db - .save_deploy_expired( - deploy_expired, + .save_transaction_expired( + transaction_expired, 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -291,7 +293,7 @@ async fn should_save_deploy_expired_with_correct_event_type_id() { .try_get::(0) .expect("Error getting event_type_id from row"); - assert_eq!(event_type_id, EventTypeId::DeployExpired as i16) + assert_eq!(event_type_id, EventTypeId::TransactionExpired as i16) } #[tokio::test] diff --git a/event_sidecar/src/database/tests.rs b/event_sidecar/src/database/tests.rs index e8e0fb77..e0385256 100644 --- a/event_sidecar/src/database/tests.rs +++ b/event_sidecar/src/database/tests.rs @@ -1,5 +1,5 @@ use crate::types::{ - database::{DatabaseReader, DatabaseWriteError, DatabaseWriter}, + database::{DatabaseReader, DatabaseWriteError, DatabaseWriter, TransactionTypeId}, sse_events::*, }; use 
casper_types::{testing::TestRng, AsymmetricType, EraId}; @@ -31,143 +31,217 @@ pub async fn should_save_and_retrieve_block_added(db: DB) { +pub async fn should_save_and_retrieve_transaction_accepted( + db: DB, +) { let mut test_rng = TestRng::new(); - let deploy_accepted = DeployAccepted::random(&mut test_rng); - - db.save_deploy_accepted( - deploy_accepted.clone(), + let transaction_accepted = TransactionAccepted::random(&mut test_rng); + let transaction_type_id = match transaction_accepted.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + db.save_transaction_accepted( + transaction_accepted.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_accepted"); + .expect("Error saving transaction_accepted"); - db.get_deploy_accepted_by_hash(&deploy_accepted.hex_encoded_hash()) - .await - .expect("Error getting deploy_accepted by hash"); + db.get_transaction_accepted_by_hash( + &transaction_type_id, + &transaction_accepted.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction_accepted by hash"); } -pub async fn should_save_and_retrieve_deploy_processed( +pub async fn should_save_and_retrieve_transaction_processed( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_processed = DeployProcessed::random(&mut test_rng, None); - - db.save_deploy_processed( - deploy_processed.clone(), + let transaction_processed = TransactionProcessed::random(&mut test_rng, None); + let transaction_type_id = match transaction_processed.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + db.save_transaction_processed( + transaction_processed.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_processed"); + .expect("Error saving transaction_processed"); - db.get_deploy_processed_by_hash(&deploy_processed.hex_encoded_hash()) - .await - .expect("Error getting deploy_processed by hash"); + db.get_transaction_processed_by_hash( + &transaction_type_id, + &transaction_processed.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction_processed by hash"); } -pub async fn should_save_and_retrieve_deploy_expired(db: DB) { +pub async fn should_save_and_retrieve_transaction_expired( + db: DB, +) { let mut test_rng = TestRng::new(); - let deploy_expired = DeployExpired::random(&mut test_rng, None); - - db.save_deploy_expired( - deploy_expired.clone(), + let transaction_expired = TransactionExpired::random(&mut test_rng, None); + let transaction_type_id = match transaction_expired.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + db.save_transaction_expired( + transaction_expired.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_expired"); + .expect("Error saving transaction_expired"); - db.get_deploy_expired_by_hash(&deploy_expired.hex_encoded_hash()) - .await - .expect("Error getting deploy_expired by hash"); + db.get_transaction_expired_by_hash( + &transaction_type_id, + 
&transaction_expired.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction_expired by hash"); } -pub async fn should_retrieve_deploy_aggregate_of_accepted( +pub async fn should_retrieve_transaction_aggregate_of_accepted< + DB: DatabaseReader + DatabaseWriter, +>( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_accepted = DeployAccepted::random(&mut test_rng); - - db.save_deploy_accepted( - deploy_accepted.clone(), + let transaction_accepted = TransactionAccepted::random(&mut test_rng); + let transaction_type_id = match transaction_accepted.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + + db.save_transaction_accepted( + transaction_accepted.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_accepted"); + .expect("Error saving transaction_accepted"); - db.get_deploy_aggregate_by_hash(&deploy_accepted.hex_encoded_hash()) - .await - .expect("Error getting deploy aggregate by hash"); + db.get_transaction_aggregate_by_identifier( + &transaction_type_id, + &transaction_accepted.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction aggregate by hash"); } -pub async fn should_retrieve_deploy_aggregate_of_processed( +pub async fn should_retrieve_transaction_aggregate_of_processed< + DB: DatabaseReader + DatabaseWriter, +>( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_accepted = DeployAccepted::random(&mut test_rng); - let deploy_processed = - DeployProcessed::random(&mut test_rng, Some(deploy_accepted.deploy_hash())); - - db.save_deploy_accepted( - deploy_accepted.clone(), + let transaction_accepted = TransactionAccepted::random(&mut test_rng); + let transaction_processed = + TransactionProcessed::random(&mut test_rng, Some(transaction_accepted.transaction_hash())); + let transaction_type_id = match transaction_accepted.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + + db.save_transaction_accepted( + transaction_accepted.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_accepted"); + .expect("Error saving transaction_accepted"); - db.save_deploy_processed( - deploy_processed, + db.save_transaction_processed( + transaction_processed, 2, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_processed"); + .expect("Error saving transaction_processed"); - db.get_deploy_aggregate_by_hash(&deploy_accepted.hex_encoded_hash()) - .await - .expect("Error getting deploy aggregate by hash"); + db.get_transaction_aggregate_by_identifier( + &transaction_type_id, + &transaction_accepted.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction aggregate by hash"); } -pub async fn should_retrieve_deploy_aggregate_of_expired( +pub async fn should_retrieve_transaction_aggregate_of_expired< + DB: DatabaseReader + DatabaseWriter, +>( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_accepted = DeployAccepted::random(&mut test_rng); - let deploy_expired = DeployExpired::random(&mut test_rng, Some(deploy_accepted.deploy_hash())); - - db.save_deploy_accepted( - deploy_accepted.clone(), + let transaction_accepted = 
TransactionAccepted::random(&mut test_rng); + let transaction_expired = + TransactionExpired::random(&mut test_rng, Some(transaction_accepted.transaction_hash())); + let transaction_type_id = match transaction_accepted.transaction_type_id() { + crate::sql::tables::transaction_type::TransactionTypeId::Deploy => { + TransactionTypeId::Deploy + } + crate::sql::tables::transaction_type::TransactionTypeId::Version1 => { + TransactionTypeId::Version1 + } + }; + + db.save_transaction_accepted( + transaction_accepted.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_accepted"); + .expect("Error saving transaction_accepted"); - db.save_deploy_expired( - deploy_expired, + db.save_transaction_expired( + transaction_expired, 2, "127.0.0.1".to_string(), "1.1.1".to_string(), ) .await - .expect("Error saving deploy_expired"); + .expect("Error saving transaction_expired"); - db.get_deploy_aggregate_by_hash(&deploy_accepted.hex_encoded_hash()) - .await - .expect("Error getting deploy aggregate by hash"); + db.get_transaction_aggregate_by_identifier( + &transaction_type_id, + &transaction_accepted.hex_encoded_hash(), + ) + .await + .expect("Error getting transaction aggregate by hash"); } pub async fn should_save_and_retrieve_fault(db: DB) { @@ -347,17 +421,17 @@ pub async fn should_disallow_insert_of_existing_block_added( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_accepted = DeployAccepted::random(&mut test_rng); + let transaction_accepted = TransactionAccepted::random(&mut test_rng); assert!(db - .save_deploy_accepted( - deploy_accepted.clone(), + .save_transaction_accepted( + transaction_accepted.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -366,8 +440,8 @@ pub async fn should_disallow_insert_of_existing_deploy_accepted< .is_ok()); let db_err = db - .save_deploy_accepted( - deploy_accepted, + .save_transaction_accepted( + transaction_accepted, 2, "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -379,21 +453,21 @@ pub async fn should_disallow_insert_of_existing_deploy_accepted< // This check is to ensure that the UNIQUE constraint error is originating from the raw event table rather than the event_log if let DatabaseWriteError::UniqueConstraint(uc_err) = db_err { - assert_eq!(uc_err.table, "DeployAccepted") + assert_eq!(uc_err.table, "TransactionAccepted") } } -pub async fn should_disallow_insert_of_existing_deploy_expired< +pub async fn should_disallow_insert_of_existing_transaction_expired< DB: DatabaseReader + DatabaseWriter, >( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_expired = DeployExpired::random(&mut test_rng, None); + let transaction_expired = TransactionExpired::random(&mut test_rng, None); assert!(db - .save_deploy_expired( - deploy_expired.clone(), + .save_transaction_expired( + transaction_expired.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -402,8 +476,8 @@ pub async fn should_disallow_insert_of_existing_deploy_expired< .is_ok()); let db_err = db - .save_deploy_expired( - deploy_expired, + .save_transaction_expired( + transaction_expired, 2, "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -415,21 +489,21 @@ pub async fn should_disallow_insert_of_existing_deploy_expired< // This check is to ensure that the UNIQUE constraint error is originating from the raw event table rather than the event_log if let DatabaseWriteError::UniqueConstraint(uc_err) = db_err { - assert_eq!(uc_err.table, "DeployExpired") + assert_eq!(uc_err.table, "TransactionExpired") } } -pub async fn 
should_disallow_insert_of_existing_deploy_processed< +pub async fn should_disallow_insert_of_existing_transaction_processed< DB: DatabaseReader + DatabaseWriter, >( db: DB, ) { let mut test_rng = TestRng::new(); - let deploy_processed = DeployProcessed::random(&mut test_rng, None); + let transaction_processed = TransactionProcessed::random(&mut test_rng, None); assert!(db - .save_deploy_processed( - deploy_processed.clone(), + .save_transaction_processed( + transaction_processed.clone(), 1, "127.0.0.1".to_string(), "1.1.1".to_string() @@ -438,8 +512,8 @@ pub async fn should_disallow_insert_of_existing_deploy_processed< .is_ok()); let db_err = db - .save_deploy_processed( - deploy_processed, + .save_transaction_processed( + transaction_processed, 2, "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -451,7 +525,7 @@ pub async fn should_disallow_insert_of_existing_deploy_processed< // This check is to ensure that the UNIQUE constraint error is originating from the raw event table rather than the event_log if let DatabaseWriteError::UniqueConstraint(uc_err) = db_err { - assert_eq!(uc_err.table, "DeployProcessed") + assert_eq!(uc_err.table, "TransactionProcessed") } } diff --git a/event_sidecar/src/database/writer_generator.rs b/event_sidecar/src/database/writer_generator.rs index 21162f12..72444462 100644 --- a/event_sidecar/src/database/writer_generator.rs +++ b/event_sidecar/src/database/writer_generator.rs @@ -86,32 +86,34 @@ impl DatabaseWriter for $extended_type { res } - async fn save_deploy_accepted( + async fn save_transaction_accepted( &self, - deploy_accepted: DeployAccepted, + transaction_accepted: TransactionAccepted, event_id: u32, event_source_address: String, api_version: String, ) -> Result { #[cfg(feature = "additional-metrics")] let start = Instant::now(); + let transaction_type_id = transaction_accepted.transaction_type_id(); let mut transaction = self.get_transaction().await?; - let json = serde_json::to_string(&deploy_accepted)?; - let encoded_hash = deploy_accepted.hex_encoded_hash(); + let json = serde_json::to_string(&transaction_accepted)?; + let transaction_identifier = transaction_accepted.identifier(); let event_log_id = save_event_log( - EventTypeId::DeployAccepted as u8, + EventTypeId::TransactionAccepted as u8, &event_source_address, event_id, - &encoded_hash, + &transaction_identifier, &api_version, &mut transaction, ) .await?; + let transaction_type_id_raw = transaction_type_id as u8; let batched_insert_stmts = vec![ - tables::deploy_accepted::create_insert_stmt(encoded_hash.clone(), json, event_log_id)?, - tables::deploy_event::create_insert_stmt(event_log_id, encoded_hash)?, + tables::transaction_accepted::create_insert_stmt(transaction_type_id_raw, transaction_identifier.clone(), json, event_log_id)?, + tables::transaction_event::create_insert_stmt(event_log_id, transaction_type_id_raw, transaction_identifier)?, ] .iter() .map(|stmt| stmt.to_string($query_materializer_expr)) @@ -122,35 +124,37 @@ impl DatabaseWriter for $extended_type { transaction.commit().await?; } #[cfg(feature = "additional-metrics")] - observe_db_operation_time("save_deploy_accepted", start); + observe_db_operation_time("save_transaction_accepted", start); res } - async fn save_deploy_processed( + async fn save_transaction_processed( &self, - deploy_processed: DeployProcessed, + transaction_processed: TransactionProcessed, event_id: u32, event_source_address: String, api_version: String, ) -> Result { #[cfg(feature = "additional-metrics")] let start = Instant::now(); + let 
transaction_type_id = transaction_processed.transaction_type_id(); let mut transaction = self.get_transaction().await?; - let json = serde_json::to_string(&deploy_processed)?; - let encoded_hash = deploy_processed.hex_encoded_hash(); + let json = serde_json::to_string(&transaction_processed)?; + let identifier = transaction_processed.identifier(); let event_log_id = save_event_log( - EventTypeId::DeployProcessed as u8, + EventTypeId::TransactionProcessed as u8, &event_source_address, event_id, - &encoded_hash, + &identifier, &api_version, &mut transaction, ) .await?; + let transaction_type_id_raw = transaction_type_id as u8; let batched_insert_stmts = vec![ - tables::deploy_processed::create_insert_stmt(encoded_hash.clone(), json, event_log_id)?, - tables::deploy_event::create_insert_stmt(event_log_id, encoded_hash)?, + tables::transaction_processed::create_insert_stmt(transaction_type_id_raw, identifier.clone(), json, event_log_id)?, + tables::transaction_event::create_insert_stmt(event_log_id, transaction_type_id_raw, identifier)?, ] .iter() .map(|stmt| stmt.to_string($query_materializer_expr)) @@ -161,35 +165,37 @@ impl DatabaseWriter for $extended_type { transaction.commit().await?; } #[cfg(feature = "additional-metrics")] - observe_db_operation_time("save_deploy_processed", start); + observe_db_operation_time("save_transaction_processed", start); res } - async fn save_deploy_expired( + async fn save_transaction_expired( &self, - deploy_expired: DeployExpired, + transaction_expired: TransactionExpired, event_id: u32, event_source_address: String, api_version: String, ) -> Result { #[cfg(feature = "additional-metrics")] let start = Instant::now(); + let transaction_type_id = transaction_expired.transaction_type_id(); let mut transaction = self.get_transaction().await?; - let json = serde_json::to_string(&deploy_expired)?; - let encoded_hash = deploy_expired.hex_encoded_hash(); + let json = serde_json::to_string(&transaction_expired)?; + let transaction_identifier = transaction_expired.identifier(); let event_log_id = save_event_log( - EventTypeId::DeployExpired as u8, + EventTypeId::TransactionExpired as u8, &event_source_address, event_id, - &encoded_hash, + &transaction_identifier, &api_version, &mut transaction, ) .await?; + let transaction_type_id_raw = transaction_type_id as u8; let batched_insert_stmts = vec![ - tables::deploy_expired::create_insert_stmt(encoded_hash.clone(), event_log_id, json)?, - tables::deploy_event::create_insert_stmt(event_log_id, encoded_hash)?, + tables::transaction_expired::create_insert_stmt(transaction_type_id_raw, transaction_identifier.clone(), event_log_id, json)?, + tables::transaction_event::create_insert_stmt(event_log_id, transaction_type_id_raw, transaction_identifier)?, ] .iter() .map(|stmt| stmt.to_string($query_materializer_expr)) @@ -200,7 +206,7 @@ impl DatabaseWriter for $extended_type { transaction.commit().await?; } #[cfg(feature = "additional-metrics")] - observe_db_operation_time("save_deploy_expired", start); + observe_db_operation_time("save_transaction_expired", start); res } diff --git a/event_sidecar/src/event_stream_server/endpoint.rs b/event_sidecar/src/event_stream_server/endpoint.rs index e3d72cc0..bd1067a7 100644 --- a/event_sidecar/src/event_stream_server/endpoint.rs +++ b/event_sidecar/src/event_stream_server/endpoint.rs @@ -1,71 +1,20 @@ -use casper_event_types::Filter; #[cfg(test)] use std::fmt::{Display, Formatter}; /// Enum representing all possible endpoints sidecar can have. 
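The test hunks above repeat the same match six times to convert the SQL-layer transaction type id into the database-API one. A minimal sketch of a conversion helper that would fold that repetition away; the two enums below are local stand-ins for crate::sql::tables::transaction_type::TransactionTypeId and crate::types::database::TransactionTypeId, and the From impl is a suggestion, not something this diff introduces.

// Stand-ins for the two crate-internal enums named in the hunks above; the variant
// set (Deploy, Version1) matches what the diff shows.
#[derive(Clone, Copy, Debug, PartialEq)]
enum SqlTransactionTypeId {
    Deploy,
    Version1,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum DbTransactionTypeId {
    Deploy,
    Version1,
}

// Hypothetical conversion the tests could call via `.into()` instead of open-coding
// the match in every test body.
impl From<SqlTransactionTypeId> for DbTransactionTypeId {
    fn from(id: SqlTransactionTypeId) -> Self {
        match id {
            SqlTransactionTypeId::Deploy => DbTransactionTypeId::Deploy,
            SqlTransactionTypeId::Version1 => DbTransactionTypeId::Version1,
        }
    }
}

fn main() {
    let db_id: DbTransactionTypeId = SqlTransactionTypeId::Deploy.into();
    assert_eq!(db_id, DbTransactionTypeId::Deploy);
}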
-/// Be advised that extending variants in this enum requires -/// an update in `is_corresponding_to` function. #[derive(Hash, Eq, PartialEq, Debug, Clone)] pub enum Endpoint { Events, - Main, - Deploys, - Sigs, Sidecar, } -impl Endpoint { - pub fn is_corresponding_to(&self, filter: &Filter) -> bool { - matches!( - (self, filter.clone()), - (Endpoint::Events, Filter::Events) - | (Endpoint::Main, Filter::Main) - | (Endpoint::Deploys, Filter::Deploys) - | (Endpoint::Sigs, Filter::Sigs) - ) - } -} - #[cfg(test)] impl Display for Endpoint { /// This implementation is for test only and created to mimick how Display is implemented for Filter. - /// We use this trick to easily test `is_corresponding_to` with all possible inputs. fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Endpoint::Events => write!(f, "events"), - Endpoint::Main => write!(f, "events/main"), - Endpoint::Deploys => write!(f, "events/deploys"), - Endpoint::Sigs => write!(f, "events/sigs"), Endpoint::Sidecar => write!(f, "events/sidecar"), } } } - -#[cfg(test)] -mod tests { - use super::Endpoint; - use casper_event_types::Filter; - - #[test] - fn try_resolve_version_should_interpret_correct_build_version() { - let all_filters = vec![Filter::Events, Filter::Main, Filter::Deploys, Filter::Sigs]; - let all_endpoints = vec![ - Endpoint::Events, - Endpoint::Main, - Endpoint::Deploys, - Endpoint::Sigs, - Endpoint::Sidecar, - ]; - for endpoint in all_endpoints.iter() { - for filter in all_filters.iter() { - let endpoint_str = endpoint.to_string(); - let filter_str = filter.to_string(); - let should_be_correspodning = endpoint_str == filter_str; - assert_eq!( - should_be_correspodning, - endpoint.is_corresponding_to(filter) - ); - } - } - } -} diff --git a/event_sidecar/src/event_stream_server/sse_server.rs b/event_sidecar/src/event_stream_server/sse_server.rs index 15dc64e6..41c507e4 100644 --- a/event_sidecar/src/event_stream_server/sse_server.rs +++ b/event_sidecar/src/event_stream_server/sse_server.rs @@ -3,8 +3,8 @@ use super::endpoint::Endpoint; #[cfg(feature = "additional-metrics")] use crate::utils::start_metrics_thread; -use casper_event_types::{sse_data::EventFilter, sse_data::SseData, Deploy, Filter as SseFilter}; -use casper_types::ProtocolVersion; +use casper_event_types::{sse_data::EventFilter, sse_data::SseData, Filter as SseFilter}; +use casper_types::{ProtocolVersion, Transaction}; use futures::{future, Stream, StreamExt}; use http::StatusCode; use hyper::Body; @@ -35,41 +35,24 @@ use warp::{ /// The URL root path. pub const SSE_API_ROOT_PATH: &str = "events"; -/// The URL path part to subscribe to all events other than `DeployAccepted`s and +/// The URL path part to subscribe to all events other than `TransactionAccepted`s and /// `FinalitySignature`s. -pub const SSE_API_MAIN_PATH: &str = "main"; -/// The URL path part to subscribe to only `DeployAccepted` events. -pub const SSE_API_DEPLOYS_PATH: &str = "deploys"; -/// The URL path part to subscribe to only `FinalitySignature` events. -pub const SSE_API_SIGNATURES_PATH: &str = "sigs"; /// The URL path part to subscribe to sidecar specific events. pub const SSE_API_SIDECAR_PATH: &str = "sidecar"; /// The URL query string field name. pub const QUERY_FIELD: &str = "start_from"; /// The filter associated with `/events` path. 
-const EVENTS_FILTER: [EventFilter; 5] = [ +const EVENTS_FILTER: [EventFilter; 8] = [ EventFilter::ApiVersion, EventFilter::BlockAdded, - EventFilter::DeployProcessed, + EventFilter::TransactionAccepted, + EventFilter::TransactionProcessed, + EventFilter::TransactionExpired, EventFilter::Fault, EventFilter::FinalitySignature, -]; - -/// The filter associated with `/events/main` path. -const MAIN_FILTER: [EventFilter; 6] = [ - EventFilter::ApiVersion, - EventFilter::BlockAdded, - EventFilter::DeployProcessed, - EventFilter::DeployExpired, - EventFilter::Fault, EventFilter::Step, ]; -/// The filter associated with `/events/deploys` path. -const DEPLOYS_FILTER: [EventFilter; 2] = [EventFilter::ApiVersion, EventFilter::DeployAccepted]; -/// The filter associated with `/events/sigs` path. -const SIGNATURES_FILTER: [EventFilter; 2] = - [EventFilter::ApiVersion, EventFilter::FinalitySignature]; /// The filter associated with `/events/sidecar` path. const SIDECAR_FILTER: [EventFilter; 1] = [EventFilter::SidecarVersion]; /// The "id" field of the events sent on the event stream to clients. @@ -78,8 +61,8 @@ type UrlProps = (&'static [EventFilter], &'static Endpoint, Option); #[derive(Serialize)] #[serde(rename_all = "PascalCase")] -pub(super) struct DeployAccepted { - pub(super) deploy_accepted: Arc, +pub(super) struct TransactionAccepted { + pub(super) transaction_accepted: Arc, } /// The components of a single SSE. @@ -173,15 +156,17 @@ async fn filter_map_server_sent_event( Some(Ok(warp_event)) } &SseData::BlockAdded { .. } - | &SseData::DeployProcessed { .. } - | &SseData::DeployExpired { .. } + | &SseData::TransactionProcessed { .. } + | &SseData::TransactionExpired { .. } | &SseData::Fault { .. } | &SseData::Step { .. } | &SseData::FinalitySignature(_) => { let warp_event = event_to_warp_event(event).id(id); Some(Ok(warp_event)) } - SseData::DeployAccepted { deploy } => handle_deploy_accepted(event, deploy, &id), + SseData::TransactionAccepted(transaction) => { + handle_transaction_accepted(event, transaction, &id) + } &SseData::Shutdown => { if should_send_shutdown(event, stream_filter) { build_event_for_outbound(event, id) @@ -195,19 +180,14 @@ async fn filter_map_server_sent_event( fn should_send_shutdown(event: &ServerSentEvent, stream_filter: &Endpoint) -> bool { match (&event.inbound_filter, stream_filter) { (None, Endpoint::Sidecar) => true, + (Some(_), _) => true, (None, _) => false, - (Some(SseFilter::Main), Endpoint::Events) => true, //If this filter handles the `/events` endpoint - // then it should also propagate from inbounds `/events/main` - (Some(SseFilter::Events), Endpoint::Main) => true, //If we are connected to a legacy node - // and the client is listening to /events/main we want to get shutdown from that - (Some(a), b) if b.is_corresponding_to(a) => true, - _ => false, } } -fn handle_deploy_accepted( +fn handle_transaction_accepted( event: &ServerSentEvent, - deploy: &Arc, + transaction: &Arc, id: &String, ) -> Option> { let maybe_value = event @@ -217,10 +197,10 @@ fn handle_deploy_accepted( let warp_event = match maybe_value { Some(json_data) => WarpServerSentEvent::default().json_data(json_data), None => { - let deploy_accepted = &DeployAccepted { - deploy_accepted: deploy.clone(), + let transaction_accepted = &TransactionAccepted { + transaction_accepted: transaction.clone(), }; - WarpServerSentEvent::default().json_data(deploy_accepted) + WarpServerSentEvent::default().json_data(transaction_accepted) } } .unwrap_or_else(|error| { @@ -274,9 +254,6 @@ fn 
build_event_for_outbound( pub(super) fn path_to_filter(path_param: &str) -> Option<&'static Endpoint> { match path_param { SSE_API_ROOT_PATH => Some(&Endpoint::Events), - SSE_API_MAIN_PATH => Some(&Endpoint::Main), - SSE_API_DEPLOYS_PATH => Some(&Endpoint::Deploys), - SSE_API_SIGNATURES_PATH => Some(&Endpoint::Sigs), SSE_API_SIDECAR_PATH => Some(&Endpoint::Sidecar), _ => None, } @@ -285,9 +262,6 @@ pub(super) fn path_to_filter(path_param: &str) -> Option<&'static Endpoint> { pub(super) fn get_filter(path_param: &str) -> Option<&'static [EventFilter]> { match path_param { SSE_API_ROOT_PATH => Some(&EVENTS_FILTER[..]), - SSE_API_MAIN_PATH => Some(&MAIN_FILTER[..]), - SSE_API_DEPLOYS_PATH => Some(&DEPLOYS_FILTER[..]), - SSE_API_SIGNATURES_PATH => Some(&SIGNATURES_FILTER[..]), SSE_API_SIDECAR_PATH => Some(&SIDECAR_FILTER[..]), _ => None, } @@ -318,11 +292,9 @@ fn parse_query(query: HashMap) -> Result, Response> { /// Creates a 404 response with a useful error message in the body. fn create_404() -> Response { let mut response = Response::new(Body::from(format!( - "invalid path: expected '/{root}/{main}', '/{root}/{deploys}' or '/{root}/{sigs}'\n", + "invalid path: expected '/{root}' or '/{root}/{sidecar}'\n", root = SSE_API_ROOT_PATH, - main = SSE_API_MAIN_PATH, - deploys = SSE_API_DEPLOYS_PATH, - sigs = SSE_API_SIGNATURES_PATH + sidecar = SSE_API_SIDECAR_PATH, ))); *response.status_mut() = StatusCode::NOT_FOUND; response @@ -596,8 +568,7 @@ fn handle_sse_event( #[cfg(test)] mod tests { use super::*; - use casper_event_types::DeployHash; - use casper_types::testing::TestRng; + use casper_types::{testing::TestRng, TransactionHash}; use rand::Rng; use regex::Regex; use std::iter; @@ -611,7 +582,7 @@ mod tests { async fn should_filter_out(event: &ServerSentEvent, filter: &'static [EventFilter]) { assert!( - filter_map_server_sent_event(event, &Endpoint::Main, filter) + filter_map_server_sent_event(event, &Endpoint::Events, filter) .await .is_none(), "should filter out {:?} with {:?}", @@ -622,7 +593,7 @@ mod tests { async fn should_not_filter_out(event: &ServerSentEvent, filter: &'static [EventFilter]) { assert!( - filter_map_server_sent_event(event, &Endpoint::Main, filter) + filter_map_server_sent_event(event, &Endpoint::Events, filter) .await .is_some(), "should not filter out {:?} with {:?}", @@ -650,24 +621,24 @@ mod tests { json_data: None, inbound_filter: None, }; - let (sse_data, deploy) = SseData::random_deploy_accepted(&mut rng); - let deploy_accepted = ServerSentEvent { + let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); + let transaction_accepted = ServerSentEvent { id: Some(rng.gen()), data: sse_data, json_data: None, inbound_filter: None, }; - let mut deploys = HashMap::new(); - let _ = deploys.insert(*deploy.hash(), deploy); - let deploy_processed = ServerSentEvent { + let mut transactions = HashMap::new(); + let _ = transactions.insert(transaction.hash(), transaction); + let transaction_processed = ServerSentEvent { id: Some(rng.gen()), - data: SseData::random_deploy_processed(&mut rng), + data: SseData::random_transaction_processed(&mut rng), json_data: None, inbound_filter: None, }; - let deploy_expired = ServerSentEvent { + let transaction_expired = ServerSentEvent { id: Some(rng.gen()), - data: SseData::random_deploy_expired(&mut rng), + data: SseData::random_transaction_expired(&mut rng), json_data: None, inbound_filter: None, }; @@ -693,48 +664,42 @@ mod tests { id: Some(rng.gen()), data: SseData::Shutdown, json_data: None, - inbound_filter: 
Some(SseFilter::Main), + inbound_filter: Some(SseFilter::Events), //For shutdown we need to provide the inbound //filter because we send shutdowns only to corresponding outbounds to prevent duplicates }; + let sidecar_api_version = ServerSentEvent { + id: Some(rng.gen()), + data: SseData::random_sidecar_version(&mut rng), + json_data: None, + inbound_filter: None, + }; - // `EventFilter::Main` should only filter out `DeployAccepted`s and `FinalitySignature`s. - should_not_filter_out(&api_version, &MAIN_FILTER[..]).await; - should_not_filter_out(&block_added, &MAIN_FILTER[..]).await; - should_not_filter_out(&deploy_processed, &MAIN_FILTER[..]).await; - should_not_filter_out(&deploy_expired, &MAIN_FILTER[..]).await; - should_not_filter_out(&fault, &MAIN_FILTER[..]).await; - should_not_filter_out(&step, &MAIN_FILTER[..]).await; - should_not_filter_out(&shutdown, &MAIN_FILTER).await; - - should_filter_out(&deploy_accepted, &MAIN_FILTER[..]).await; - should_filter_out(&finality_signature, &MAIN_FILTER[..]).await; - - // `EventFilter::DeployAccepted` should filter out everything except `ApiVersion`s and - // `DeployAccepted`s. - should_not_filter_out(&api_version, &DEPLOYS_FILTER[..]).await; - should_not_filter_out(&deploy_accepted, &DEPLOYS_FILTER[..]).await; - should_not_filter_out(&shutdown, &DEPLOYS_FILTER[..]).await; - - should_filter_out(&block_added, &DEPLOYS_FILTER[..]).await; - should_filter_out(&deploy_processed, &DEPLOYS_FILTER[..]).await; - should_filter_out(&deploy_expired, &DEPLOYS_FILTER[..]).await; - should_filter_out(&fault, &DEPLOYS_FILTER[..]).await; - should_filter_out(&finality_signature, &DEPLOYS_FILTER[..]).await; - should_filter_out(&step, &DEPLOYS_FILTER[..]).await; - - // `EventFilter::Signatures` should filter out everything except `ApiVersion`s and - // `FinalitySignature`s. - should_not_filter_out(&api_version, &SIGNATURES_FILTER[..]).await; - should_not_filter_out(&finality_signature, &SIGNATURES_FILTER[..]).await; - should_not_filter_out(&shutdown, &SIGNATURES_FILTER[..]).await; - - should_filter_out(&block_added, &SIGNATURES_FILTER[..]).await; - should_filter_out(&deploy_accepted, &SIGNATURES_FILTER[..]).await; - should_filter_out(&deploy_processed, &SIGNATURES_FILTER[..]).await; - should_filter_out(&deploy_expired, &SIGNATURES_FILTER[..]).await; - should_filter_out(&fault, &SIGNATURES_FILTER[..]).await; - should_filter_out(&step, &SIGNATURES_FILTER[..]).await; + // `EventFilter::Events` should only filter out `SidecarApiVersions`s. + should_not_filter_out(&api_version, &EVENTS_FILTER[..]).await; + should_not_filter_out(&block_added, &EVENTS_FILTER[..]).await; + should_not_filter_out(&transaction_accepted, &EVENTS_FILTER[..]).await; + should_not_filter_out(&transaction_processed, &EVENTS_FILTER[..]).await; + should_not_filter_out(&transaction_expired, &EVENTS_FILTER[..]).await; + should_not_filter_out(&fault, &EVENTS_FILTER[..]).await; + should_not_filter_out(&step, &EVENTS_FILTER[..]).await; + should_not_filter_out(&shutdown, &EVENTS_FILTER).await; + should_not_filter_out(&api_version, &EVENTS_FILTER[..]).await; + should_not_filter_out(&finality_signature, &EVENTS_FILTER[..]).await; + should_filter_out(&sidecar_api_version, &EVENTS_FILTER[..]).await; + + // `EventFilter::Events` should only filter out `SidecarApiVersions`s. 
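To make the intent of the assertions that follow concrete: with the per-topic paths gone, the firehose filter admits every node event and rejects only the sidecar version event, while the sidecar filter does the opposite. A self-contained sketch under that assumption; the enum and the passes helper are illustrative stand-ins, not the crate's actual EventFilter API, and shutdown events are deliberately left out because they are routed by should_send_shutdown rather than by the filter slice.

// Local stand-in mirroring the eight-entry EVENTS_FILTER and one-entry SIDECAR_FILTER
// shown in the diff above.
#[derive(Clone, Copy, PartialEq, Debug)]
enum EventKind {
    ApiVersion,
    BlockAdded,
    TransactionAccepted,
    TransactionProcessed,
    TransactionExpired,
    Fault,
    FinalitySignature,
    Step,
    SidecarVersion,
}

const EVENTS_FILTER: [EventKind; 8] = [
    EventKind::ApiVersion,
    EventKind::BlockAdded,
    EventKind::TransactionAccepted,
    EventKind::TransactionProcessed,
    EventKind::TransactionExpired,
    EventKind::Fault,
    EventKind::FinalitySignature,
    EventKind::Step,
];
const SIDECAR_FILTER: [EventKind; 1] = [EventKind::SidecarVersion];

// An event is forwarded to a client only if its kind appears in the endpoint's filter.
fn passes(kind: EventKind, filter: &[EventKind]) -> bool {
    filter.contains(&kind)
}

fn main() {
    assert!(passes(EventKind::TransactionAccepted, &EVENTS_FILTER));
    assert!(!passes(EventKind::SidecarVersion, &EVENTS_FILTER));
    assert!(passes(EventKind::SidecarVersion, &SIDECAR_FILTER));
    assert!(!passes(EventKind::BlockAdded, &SIDECAR_FILTER));
}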
+ should_filter_out(&api_version, &SIDECAR_FILTER[..]).await; + should_filter_out(&block_added, &SIDECAR_FILTER[..]).await; + should_filter_out(&transaction_accepted, &SIDECAR_FILTER[..]).await; + should_filter_out(&transaction_processed, &SIDECAR_FILTER[..]).await; + should_filter_out(&transaction_expired, &SIDECAR_FILTER[..]).await; + should_filter_out(&fault, &SIDECAR_FILTER[..]).await; + should_filter_out(&step, &SIDECAR_FILTER[..]).await; + should_filter_out(&api_version, &SIDECAR_FILTER[..]).await; + should_filter_out(&finality_signature, &SIDECAR_FILTER[..]).await; + should_not_filter_out(&shutdown, &SIDECAR_FILTER).await; + should_not_filter_out(&sidecar_api_version, &SIDECAR_FILTER[..]).await; } /// This test checks that events with incorrect IDs (i.e. no types have an ID except for @@ -756,24 +721,24 @@ mod tests { json_data: None, inbound_filter: None, }; - let (sse_data, deploy) = SseData::random_deploy_accepted(&mut rng); - let malformed_deploy_accepted = ServerSentEvent { + let (sse_data, transaction) = SseData::random_transaction_accepted(&mut rng); + let malformed_transaction_accepted = ServerSentEvent { id: None, data: sse_data, json_data: None, inbound_filter: None, }; - let mut deploys = HashMap::new(); - let _ = deploys.insert(*deploy.hash(), deploy); - let malformed_deploy_processed = ServerSentEvent { + let mut transactions = HashMap::new(); + let _ = transactions.insert(transaction.hash(), transaction); + let malformed_transaction_processed = ServerSentEvent { id: None, - data: SseData::random_deploy_processed(&mut rng), + data: SseData::random_transaction_processed(&mut rng), json_data: None, inbound_filter: None, }; - let malformed_deploy_expired = ServerSentEvent { + let malformed_transaction_expired = ServerSentEvent { id: None, - data: SseData::random_deploy_expired(&mut rng), + data: SseData::random_transaction_expired(&mut rng), json_data: None, inbound_filter: None, }; @@ -802,16 +767,12 @@ mod tests { inbound_filter: None, }; - for filter in &[ - &MAIN_FILTER[..], - &DEPLOYS_FILTER[..], - &SIGNATURES_FILTER[..], - ] { + for filter in &[&EVENTS_FILTER[..], &SIDECAR_FILTER[..]] { should_filter_out(&malformed_api_version, filter).await; should_filter_out(&malformed_block_added, filter).await; - should_filter_out(&malformed_deploy_accepted, filter).await; - should_filter_out(&malformed_deploy_processed, filter).await; - should_filter_out(&malformed_deploy_expired, filter).await; + should_filter_out(&malformed_transaction_accepted, filter).await; + should_filter_out(&malformed_transaction_processed, filter).await; + should_filter_out(&malformed_transaction_expired, filter).await; should_filter_out(&malformed_fault, filter).await; should_filter_out(&malformed_finality_signature, filter).await; should_filter_out(&malformed_step, filter).await; @@ -820,10 +781,10 @@ mod tests { } #[allow(clippy::too_many_lines)] - async fn should_filter_duplicate_events(path_filter: &str) { + async fn should_filter_duplicate_events() { let mut rng = TestRng::new(); - let mut deploys = HashMap::new(); + let mut transactions = HashMap::new(); let initial_events: Vec = iter::once(ServerSentEvent::initial_event(ProtocolVersion::V1_0_0)) @@ -831,8 +792,7 @@ mod tests { &mut rng, 0, NUM_INITIAL_EVENTS, - path_filter, - &mut deploys, + &mut transactions, )) .collect(); @@ -844,8 +804,7 @@ mod tests { &mut rng, *duplicate_count, &initial_events, - path_filter, - &mut deploys, + &mut transactions, ); let (initial_events_sender, initial_events_receiver) = mpsc::unbounded_channel(); @@ -865,7 
+824,7 @@ mod tests { drop(initial_events_sender); drop(ongoing_events_sender); - let stream_filter = path_to_filter(path_filter).unwrap(); + let stream_filter = path_to_filter(SSE_API_ROOT_PATH).unwrap(); #[cfg(feature = "additional-metrics")] let (tx, rx) = channel(1000); // Collect the events emitted by `stream_to_client()` - should not contain duplicates. @@ -873,7 +832,7 @@ mod tests { initial_events_receiver, ongoing_events_receiver, stream_filter, - get_filter(path_filter).unwrap(), + get_filter(SSE_API_ROOT_PATH).unwrap(), #[cfg(feature = "additional-metrics")] tx, ) @@ -925,45 +884,32 @@ mod tests { /// This test checks that main events from the initial stream which are duplicated in the /// ongoing stream are filtered out. #[tokio::test] - async fn should_filter_duplicate_main_events() { - should_filter_duplicate_events(SSE_API_MAIN_PATH).await - } - - /// This test checks that deploy-accepted events from the initial stream which are duplicated in - /// the ongoing stream are filtered out. - #[tokio::test] - async fn should_filter_duplicate_deploys_events() { - should_filter_duplicate_events(SSE_API_DEPLOYS_PATH).await - } - - /// This test checks that signature events from the initial stream which are duplicated in the - /// ongoing stream are filtered out. - #[tokio::test] - async fn should_filter_duplicate_signature_events() { - should_filter_duplicate_events(SSE_API_SIGNATURES_PATH).await + async fn should_filter_duplicate_firehose_events() { + should_filter_duplicate_events().await } - // Returns `count` random SSE events, all of a single variant defined by `path_filter`. The - // events will have sequential IDs starting from `start_id`, and if the path filter - // indicates the events should be deploy-accepted ones, the corresponding random deploys - // will be inserted into `deploys`. + // Returns `count` random SSE events. The events will have sequential IDs starting from `start_id`, and if the path filter + // indicates the events should be transaction-accepted ones, the corresponding random transactions + // will be inserted into `transactions`. 
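The rewritten comment above describes the replacement for the per-path generators: make_random_events now rotates through three event kinds by id instead of producing a single variant per path filter. A tiny illustrative stand-in for that rotation; the enum is local and the real code builds SseData variants.

#[derive(Debug, PartialEq)]
enum Generated {
    BlockAdded,
    TransactionAccepted,
    FinalitySignature,
}

// Same discriminator scheme as in the function below: the event kind cycles with the id.
fn pick_variant(id: u32) -> Generated {
    match id % 3 {
        0 => Generated::BlockAdded,
        1 => Generated::TransactionAccepted,
        2 => Generated::FinalitySignature,
        _ => unreachable!(),
    }
}

fn main() {
    assert_eq!(pick_variant(0), Generated::BlockAdded);
    assert_eq!(pick_variant(4), Generated::TransactionAccepted);
    assert_eq!(pick_variant(5), Generated::FinalitySignature);
}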
fn make_random_events( rng: &mut TestRng, start_id: Id, count: usize, - path_filter: &str, - deploys: &mut HashMap, + transactions: &mut HashMap, ) -> Vec { (start_id..(start_id + count as u32)) .map(|id| { - let data = match path_filter { - SSE_API_MAIN_PATH => SseData::random_block_added(rng), - SSE_API_DEPLOYS_PATH => { - let (event, deploy) = SseData::random_deploy_accepted(rng); - assert!(deploys.insert(*deploy.hash(), deploy).is_none()); + let discriminator = id % 3; + let data = match discriminator { + 0 => SseData::random_block_added(rng), + 1 => { + let (event, transaction) = SseData::random_transaction_accepted(rng); + assert!(transactions + .insert(transaction.hash(), transaction) + .is_none()); event } - SSE_API_SIGNATURES_PATH => SseData::random_finality_signature(rng), + 2 => SseData::random_finality_signature(rng), _ => unreachable!(), }; ServerSentEvent { @@ -983,8 +929,7 @@ mod tests { rng: &mut TestRng, duplicate_count: usize, initial_events: &[ServerSentEvent], - path_filter: &str, - deploys: &mut HashMap, + transactions: &mut HashMap, ) -> Vec { assert!(duplicate_count < initial_events.len()); let initial_skip_count = initial_events.len() - duplicate_count; @@ -998,8 +943,7 @@ mod tests { rng, unique_start_id, unique_count, - path_filter, - deploys, + transactions, )) .collect() } diff --git a/event_sidecar/src/event_stream_server/tests.rs b/event_sidecar/src/event_stream_server/tests.rs index 3f4c64dd..e229d0ae 100644 --- a/event_sidecar/src/event_stream_server/tests.rs +++ b/event_sidecar/src/event_stream_server/tests.rs @@ -6,9 +6,8 @@ use pretty_assertions::assert_eq; use reqwest::Response; use serde_json::Value; use sse_server::{ - DeployAccepted, Id, QUERY_FIELD, SSE_API_DEPLOYS_PATH as DEPLOYS_PATH, - SSE_API_MAIN_PATH as MAIN_PATH, SSE_API_ROOT_PATH as ROOT_PATH, - SSE_API_SIGNATURES_PATH as SIGS_PATH, + Id, TransactionAccepted, QUERY_FIELD, SSE_API_ROOT_PATH as ROOT_PATH, + SSE_API_SIDECAR_PATH as SIDECAR_PATH, }; use std::{ collections::HashMap, @@ -207,17 +206,19 @@ impl TestFixture { fs::create_dir_all(&storage_dir).unwrap(); let protocol_version = ProtocolVersion::from_parts(1, 2, 3); - let mut deploys = HashMap::new(); + let mut transactions = HashMap::new(); let events: Vec<(SseData, Option)> = (0..EVENT_COUNT) .map(|i| match i % DISTINCT_EVENTS_COUNT { 0 => SseData::random_block_added(rng), 1 => { - let (event, deploy) = SseData::random_deploy_accepted(rng); - assert!(deploys.insert(*deploy.hash(), deploy).is_none()); + let (event, transaction) = SseData::random_transaction_accepted(rng); + assert!(transactions + .insert(transaction.hash(), transaction) + .is_none()); event } - 2 => SseData::random_deploy_processed(rng), - 3 => SseData::random_deploy_expired(rng), + 2 => SseData::random_transaction_processed(rng), + 3 => SseData::random_transaction_expired(rng), 4 => SseData::random_fault(rng), 5 => SseData::random_step(rng), 6 => SseData::random_finality_signature(rng), @@ -284,7 +285,7 @@ impl TestFixture { }; let api_version_event = SseData::ApiVersion(protocol_version); - server.broadcast(api_version_event.clone(), Some(SseFilter::Main), None); + server.broadcast(api_version_event.clone(), Some(SseFilter::Events), None); for (id, (event, maybe_json_data)) in events.iter().cycle().enumerate().take(event_count as usize) { @@ -297,7 +298,7 @@ impl TestFixture { .await; server.broadcast( event.clone(), - Some(SseFilter::Main), + Some(SseFilter::Events), maybe_json_data .as_ref() .map(|el| serde_json::from_str(el.as_str()).unwrap()), @@ -622,7 +623,7 @@ 
fn parse_response(response_text: String, client_id: &str) -> Vec /// * no `?start_from=` query /// * connected before first event /// -/// Expected to receive all main, deploy-accepted or signature events depending on `filter`. +/// Expected to receive all main, transaction-accepted or signature events depending on `filter`. async fn should_serve_events_with_no_query(path: &str) { let mut rng = TestRng::new(); let mut fixture = TestFixture::new(&mut rng); @@ -640,25 +641,15 @@ async fn should_serve_events_with_no_query(path: &str) { } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_main_events_with_no_query() { - should_serve_events_with_no_query(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_deploy_accepted_events_with_no_query() { - should_serve_events_with_no_query(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_signature_events_with_no_query() { - should_serve_events_with_no_query(SIGS_PATH).await; +async fn should_serve_firehose_events_with_no_query() { + should_serve_events_with_no_query(ROOT_PATH).await; } /// Client setup: /// * `/events/?start_from=25` /// * connected just before event ID 50 /// -/// Expected to receive main, deploy-accepted or signature events (depending on `path`) from ID 25 +/// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 25 to 49 should still be in the server buffer. async fn should_serve_events_with_query(path: &str) { let mut rng = TestRng::new(); @@ -680,25 +671,15 @@ async fn should_serve_events_with_query(path: &str) { } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_main_events_with_query() { - should_serve_events_with_query(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_deploy_accepted_events_with_query() { - should_serve_events_with_query(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_signature_events_with_query() { - should_serve_events_with_query(SIGS_PATH).await; +async fn should_serve_firehose_events_with_query() { + should_serve_events_with_query(ROOT_PATH).await; } /// Client setup: /// * `/events/?start_from=0` /// * connected just before event ID 75 /// -/// Expected to receive main, deploy-accepted or signature events (depending on `path`) from ID 25 +/// Expected to receive main, transaction-accepted or signature events (depending on `path`) from ID 25 /// onwards, as events 0 to 24 should have been purged from the server buffer. 
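The doc comment above relies on the server's bounded event buffer: a client asking to start from an id that has already been purged can only receive events from the oldest id still buffered. A small illustrative calculation of that behaviour; the function name and the buffer size of 50 are assumptions taken from the surrounding test descriptions, not the server's actual API.

// Given the server's current id, the buffer capacity, and the client's requested
// start id, return the first id the client can actually receive.
fn first_available_id(current_id: u32, buffer_size: u32, start_from: u32) -> u32 {
    let oldest_buffered = current_id.saturating_sub(buffer_size);
    start_from.max(oldest_buffered)
}

fn main() {
    // Buffer holds 50 events, server is just before id 75, client asks for 0:
    // ids 0..24 have been purged, so delivery starts at 25.
    assert_eq!(first_available_id(75, 50, 0), 25);
    // Asking for 25 while 25..49 are still buffered yields 25 unchanged.
    assert_eq!(first_available_id(50, 50, 25), 25);
}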
async fn should_serve_remaining_events_with_query(path: &str) { let mut rng = TestRng::new(); @@ -721,25 +702,15 @@ async fn should_serve_remaining_events_with_query(path: &str) { } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_remaining_main_events_with_query() { - should_serve_remaining_events_with_query(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_remaining_deploy_accepted_events_with_query() { - should_serve_remaining_events_with_query(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_remaining_signature_events_with_query() { - should_serve_remaining_events_with_query(SIGS_PATH).await; +async fn should_serve_remaining_firehose_events_with_query() { + should_serve_remaining_events_with_query(ROOT_PATH).await; } /// Client setup: /// * `/events/?start_from=25` /// * connected before first event /// -/// Expected to receive all main, deploy-accepted or signature events (depending on `path`), as +/// Expected to receive all main, transaction-accepted or signature events (depending on `path`), as /// event 25 hasn't been added to the server buffer yet. async fn should_serve_events_with_query_for_future_event(path: &str) { let mut rng = TestRng::new(); @@ -758,18 +729,8 @@ async fn should_serve_events_with_query_for_future_event(path: &str) { } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_main_events_with_query_for_future_event() { - should_serve_events_with_query_for_future_event(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_deploy_accepted_events_with_query_for_future_event() { - should_serve_events_with_query_for_future_event(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_serve_signature_events_with_query_for_future_event() { - should_serve_events_with_query_for_future_event(SIGS_PATH).await; +async fn should_serve_firehose_events_with_query_for_future_event() { + should_serve_events_with_query_for_future_event(ROOT_PATH).await; } /// Checks that when a server is shut down (e.g. for a node upgrade), connected clients don't have @@ -782,39 +743,24 @@ async fn server_exit_should_gracefully_shut_down_stream() { // Start the server, waiting for three clients to connect. let mut server_behavior = ServerBehavior::new(); let barrier1 = server_behavior.add_client_sync_before_event(0); - let barrier2 = server_behavior.add_client_sync_before_event(0); - let barrier3 = server_behavior.add_client_sync_before_event(0); let server_address = fixture.run_server(server_behavior).await; - let url1 = url(server_address, MAIN_PATH, None); - let url2 = url(server_address, DEPLOYS_PATH, None); - let url3 = url(server_address, SIGS_PATH, None); + let url1 = url(server_address, ROOT_PATH, None); // Run the three clients, and stop the server after a short delay. 
- let (received_events1, received_events2, received_events3, _) = join!( - subscribe(&url1, barrier1, EVENT_COUNT, "client 1"), - subscribe(&url2, barrier2, EVENT_COUNT, "client 2"), - subscribe(&url3, barrier3, EVENT_COUNT, "client 3"), - async { - time::sleep(DELAY_BETWEEN_EVENTS * EVENT_COUNT / 2).await; - fixture.stop_server().await - } - ); + let (received_events1, _) = join!(subscribe(&url1, barrier1, EVENT_COUNT, "client 1"), async { + time::sleep(DELAY_BETWEEN_EVENTS * EVENT_COUNT / 2).await; + fixture.stop_server().await + }); // Ensure all clients' streams terminated without error. let received_events1 = received_events1.unwrap(); - let received_events2 = received_events2.unwrap(); - let received_events3 = received_events3.unwrap(); // Ensure all clients received some events... assert!(!received_events1.is_empty()); - assert!(!received_events2.is_empty()); - assert!(!received_events3.is_empty()); // ...but not the full set they would have if the server hadn't stopped early. - assert!(received_events1.len() < fixture.all_filtered_events(MAIN_PATH).0.len()); - assert!(received_events2.len() < fixture.all_filtered_events(DEPLOYS_PATH).0.len()); - assert!(received_events3.len() < fixture.all_filtered_events(SIGS_PATH).0.len()); + assert!(received_events1.len() < fixture.all_filtered_events(ROOT_PATH).0.len()); } /// Checks that clients which don't consume the events in a timely manner are forcibly disconnected @@ -829,21 +775,13 @@ async fn lagging_clients_should_be_disconnected() { // at most `MAX_EVENT_COUNT` events, but the clients' futures should return before that, having // been disconnected for lagging. let mut server_behavior = ServerBehavior::new_for_lagging_test(); - let barrier_main = server_behavior.add_client_sync_before_event(0); - let barrier_deploys = server_behavior.add_client_sync_before_event(0); - let barrier_sigs = server_behavior.add_client_sync_before_event(0); + let barrier_events = server_behavior.add_client_sync_before_event(0); let server_address = fixture.run_server(server_behavior).await; - let url_main = url(server_address, MAIN_PATH, None); - let url_deploys = url(server_address, DEPLOYS_PATH, None); - let url_sigs = url(server_address, SIGS_PATH, None); + let url_events = url(server_address, ROOT_PATH, None); // Run the slow clients, then stop the server. - let (result_slow_main, result_slow_deploys, result_slow_sigs) = join!( - subscribe_slow(&url_main, barrier_main, "client 1"), - subscribe_slow(&url_deploys, barrier_deploys, "client 2"), - subscribe_slow(&url_sigs, barrier_sigs, "client 3"), - ); + let result_slow_events = subscribe_slow(&url_events, barrier_events, "client 1").await; fixture.stop_server().await; // Ensure both slow clients' streams terminated with an `UnexpectedEof` error. let check_error = |result: Result<(), reqwest::Error>| { @@ -860,9 +798,7 @@ async fn lagging_clients_should_be_disconnected() { .kind(); assert!(matches!(kind, io::ErrorKind::UnexpectedEof)); }; - check_error(result_slow_main); - check_error(result_slow_deploys); - check_error(result_slow_sigs); + check_error(result_slow_events); } /// Checks that clients using the correct but wrong path get a helpful error response. 
@@ -882,23 +818,11 @@ async fn should_handle_bad_url_path() { format!("http://{}/{}?{}=0", server_address, QUERY_FIELD, ROOT_PATH), format!("http://{}/{}/bad", server_address, ROOT_PATH), format!("http://{}/{}/bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH), - format!("http://{}/{}/{}bad", server_address, ROOT_PATH, MAIN_PATH), - format!("http://{}/{}/{}bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, MAIN_PATH), - format!("http://{}/{}/{}bad", server_address, ROOT_PATH, DEPLOYS_PATH), - format!("http://{}/{}/{}bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, DEPLOYS_PATH), - format!("http://{}/{}/{}bad", server_address, ROOT_PATH, SIGS_PATH), - format!("http://{}/{}/{}bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, SIGS_PATH), - format!("http://{}/{}/{}/bad", server_address, ROOT_PATH, MAIN_PATH), - format!("http://{}/{}/{}/bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, MAIN_PATH), - format!("http://{}/{}/{}/bad", server_address, ROOT_PATH, DEPLOYS_PATH), - format!("http://{}/{}/{}/bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, DEPLOYS_PATH), - format!("http://{}/{}/{}/bad", server_address, ROOT_PATH, SIGS_PATH), - format!("http://{}/{}/{}/bad?{}=0", server_address, QUERY_FIELD, ROOT_PATH, SIGS_PATH), ]; let expected_body = format!( - "invalid path: expected '/{0}/{1}', '/{0}/{2}' or '/{0}/{3}'", - ROOT_PATH, MAIN_PATH, DEPLOYS_PATH, SIGS_PATH + "invalid path: expected '/{0}' or '/{0}/{1}'", + ROOT_PATH, SIDECAR_PATH ); for url in &urls { let response = reqwest::get(url).await.unwrap(); @@ -921,34 +845,21 @@ async fn start_query_url_test() -> (TestFixture, SocketAddr) { (fixture, server_address) } -fn build_urls(server_address: SocketAddr) -> (String, String, String) { - let main_url = format!("http://{}/{}/{}", server_address, ROOT_PATH, MAIN_PATH); - let deploys_url = format!("http://{}/{}/{}", server_address, ROOT_PATH, DEPLOYS_PATH); - let sigs_url = format!("http://{}/{}/{}", server_address, ROOT_PATH, SIGS_PATH); - (main_url, deploys_url, sigs_url) +fn build_urls(server_address: SocketAddr) -> String { + format!("http://{}/{}", server_address, ROOT_PATH) } /// Checks that clients using the correct but wrong query get a helpful error /// response. 
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_handle_bad_url_query() { let (mut fixture, server_address) = start_query_url_test().await; - let (main_url, deploys_url, sigs_url) = build_urls(server_address); + let events_url = build_urls(server_address); let urls = [ - format!("{}?not-a-kv-pair", main_url), - format!("{}?not-a-kv-pair", deploys_url), - format!("{}?not-a-kv-pair", sigs_url), - format!("{}?start_fro=0", main_url), - format!("{}?start_fro=0", deploys_url), - format!("{}?start_fro=0", sigs_url), - format!("{}?{}=not-integer", main_url, QUERY_FIELD), - format!("{}?{}=not-integer", deploys_url, QUERY_FIELD), - format!("{}?{}=not-integer", sigs_url, QUERY_FIELD), - format!("{}?{}='0'", main_url, QUERY_FIELD), - format!("{}?{}='0'", deploys_url, QUERY_FIELD), - format!("{}?{}='0'", sigs_url, QUERY_FIELD), - format!("{}?{}=0&extra=1", main_url, QUERY_FIELD), - format!("{}?{}=0&extra=1", deploys_url, QUERY_FIELD), - format!("{}?{}=0&extra=1", sigs_url, QUERY_FIELD), + format!("{}?not-a-kv-pair", events_url), + format!("{}?start_fro=0", events_url), + format!("{}?{}=not-integer", events_url, QUERY_FIELD), + format!("{}?{}='0'", events_url, QUERY_FIELD), + format!("{}?{}=0&extra=1", events_url, QUERY_FIELD), ]; let expected_body = format!( "invalid query: expected single field '{}='", @@ -1024,17 +935,7 @@ async fn should_persist_event_ids(path: &str) { #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn should_persist_main_event_ids() { - should_persist_event_ids(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_persist_deploy_accepted_event_ids() { - should_persist_event_ids(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_persist_signature_event_ids() { - should_persist_event_ids(SIGS_PATH).await; + should_persist_event_ids(ROOT_PATH).await; } /// Check that a server handles wrapping round past the maximum value for event IDs. 
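The wrap-around behaviour the comment above refers to can be illustrated with plain u32 arithmetic; this is a sketch of the idea only, not the server's id-assignment code.

// Event ids are u32s; when the counter reaches the maximum value it wraps back to 0
// rather than panicking on overflow.
fn next_event_id(current: u32) -> u32 {
    current.wrapping_add(1)
}

fn main() {
    assert_eq!(next_event_id(41), 42);
    assert_eq!(next_event_id(u32::MAX), 0);
}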
@@ -1081,18 +982,8 @@ async fn should_handle_wrapping_past_max_event_id(path: &str) { } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_handle_wrapping_past_max_event_id_for_main() { - should_handle_wrapping_past_max_event_id(MAIN_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_handle_wrapping_past_max_event_id_for_deploy_accepted() { - should_handle_wrapping_past_max_event_id(DEPLOYS_PATH).await; -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 2)] -async fn should_handle_wrapping_past_max_event_id_for_signatures() { - should_handle_wrapping_past_max_event_id(SIGS_PATH).await; +async fn should_handle_wrapping_past_max_event_id_for_events() { + should_handle_wrapping_past_max_event_id(ROOT_PATH).await; } /// Checks that a server rejects new clients with an HTTP 503 when it already has the specified @@ -1115,65 +1006,50 @@ async fn should_limit_concurrent_subscribers() { let barrier6 = server_behavior.add_client_sync_before_event(1); let server_address = fixture.run_server(server_behavior).await; - let url_main = url(server_address, MAIN_PATH, None); - let url_deploys = url(server_address, DEPLOYS_PATH, None); - let url_sigs = url(server_address, SIGS_PATH, None); + let url_main = url(server_address, ROOT_PATH, None); - let (expected_main_events, final_main_id) = fixture.all_filtered_events(MAIN_PATH); - let (expected_deploys_events, final_deploys_id) = fixture.all_filtered_events(DEPLOYS_PATH); - let (expected_sigs_events, final_sigs_id) = fixture.all_filtered_events(SIGS_PATH); + let (expected_events, final_id) = fixture.all_filtered_events(ROOT_PATH); // Run the six clients. let ( - received_events_main, - received_events_deploys, - received_events_sigs, - empty_events_main, - empty_events_deploys, - empty_events_sigs, + received_events_1, + received_events_2, + received_events_3, + empty_events_1, + empty_events_2, + empty_events_3, ) = join!( - subscribe(&url_main, barrier1, final_main_id, "client 1"), - subscribe(&url_deploys, barrier2, final_deploys_id, "client 2"), - subscribe(&url_sigs, barrier3, final_sigs_id, "client 3"), - subscribe(&url_main, barrier4, final_main_id, "client 4"), - subscribe(&url_deploys, barrier5, final_deploys_id, "client 5"), - subscribe(&url_sigs, barrier6, final_sigs_id, "client 6"), + subscribe(&url_main, barrier1, final_id, "client 1"), + subscribe(&url_main, barrier2, final_id, "client 2"), + subscribe(&url_main, barrier3, final_id, "client 3"), + subscribe(&url_main, barrier4, final_id, "client 4"), + subscribe(&url_main, barrier5, final_id, "client 5"), + subscribe(&url_main, barrier6, final_id, "client 6"), ); // Check the first three received all expected events. - assert_eq!(received_events_main.unwrap(), expected_main_events); - assert_eq!(received_events_deploys.unwrap(), expected_deploys_events); - assert_eq!(received_events_sigs.unwrap(), expected_sigs_events); + assert_eq!(received_events_1.unwrap(), expected_events); + assert_eq!(received_events_2.unwrap(), expected_events); + assert_eq!(received_events_3.unwrap(), expected_events); // Check the second three received no events. - assert!(empty_events_main.unwrap().is_empty()); - assert!(empty_events_deploys.unwrap().is_empty()); - assert!(empty_events_sigs.unwrap().is_empty()); + assert!(empty_events_1.unwrap().is_empty()); + assert!(empty_events_2.unwrap().is_empty()); + assert!(empty_events_3.unwrap().is_empty()); // Check that now the first clients have all disconnected, three new clients can connect. 
Have // them start from event 80 to allow them to actually pull some events off the stream (as the // server has by now stopped creating any new events). let start_id = EVENT_COUNT - 20; - let url_main = url(server_address, MAIN_PATH, Some(start_id)); - let url_deploys = url(server_address, DEPLOYS_PATH, Some(start_id)); - let url_sigs = url(server_address, SIGS_PATH, Some(start_id)); + let url_main = url(server_address, ROOT_PATH, Some(start_id)); - let (expected_main_events, final_main_id) = fixture.filtered_events(MAIN_PATH, start_id); - let (expected_deploys_events, final_deploys_id) = - fixture.filtered_events(DEPLOYS_PATH, start_id); - let (expected_sigs_events, final_sigs_id) = fixture.filtered_events(SIGS_PATH, start_id); + let (expected_main_events, final_main_id) = fixture.filtered_events(ROOT_PATH, start_id); - let (received_events_main, received_events_deploys, received_events_sigs) = join!( - subscribe_no_sync(&url_main, final_main_id, "client 7"), - subscribe_no_sync(&url_deploys, final_deploys_id, "client 8"), - subscribe_no_sync(&url_sigs, final_sigs_id, "client 9"), - ); + let received_events_main = subscribe_no_sync(&url_main, final_main_id, "client 7").await; // Check the last three clients' received events are as expected. assert_eq!(received_events_main.unwrap(), expected_main_events); - assert_eq!(received_events_deploys.unwrap(), expected_deploys_events); - assert_eq!(received_events_sigs.unwrap(), expected_sigs_events); fixture.stop_server().await; } @@ -1185,10 +1061,12 @@ fn build_id_filter(from: u128) -> FilterLambda { } let data = match event { - SseData::DeployAccepted { deploy } => serde_json::to_string(&DeployAccepted { - deploy_accepted: deploy.clone(), - }) - .unwrap(), + SseData::TransactionAccepted(transaction) => { + serde_json::to_string(&TransactionAccepted { + transaction_accepted: transaction.clone(), + }) + .unwrap() + } _ => serde_json::to_string(event).unwrap(), }; diff --git a/event_sidecar/src/lib.rs b/event_sidecar/src/lib.rs index 7bca20d0..56457231 100644 --- a/event_sidecar/src/lib.rs +++ b/event_sidecar/src/lib.rs @@ -34,6 +34,7 @@ use casper_event_listener::{ EventListener, EventListenerBuilder, NodeConnectionInterface, SseEvent, }; use casper_event_types::{metrics, sse_data::SseData, Filter}; +use casper_types::ProtocolVersion; use futures::future::join_all; use hex_fmt::HexFmt; use tokio::{ @@ -375,107 +376,108 @@ async fn handle_single_event { + SseData::TransactionAccepted(transaction) => { + let transaction_accepted = TransactionAccepted::new(transaction.clone()); + let entity_identifier = transaction_accepted.identifier(); if enable_event_logging { - let hex_deploy_hash = HexFmt(deploy.hash().inner()); - info!("Deploy Accepted: {:18}", hex_deploy_hash); - debug!("Deploy Accepted: {}", hex_deploy_hash); + info!("Transaction Accepted: {:18}", entity_identifier); + debug!("Transaction Accepted: {}", entity_identifier); } - let deploy_accepted = DeployAccepted::new(deploy.clone()); count_internal_event("main_inbound_sse_data", "db_save_start"); let res = database - .save_deploy_accepted( - deploy_accepted, + .save_transaction_accepted( + transaction_accepted, sse_event.id, sse_event.source.to_string(), sse_event.api_version, ) .await; handle_database_save_result( - "DeployAccepted", - HexFmt(deploy.hash().inner()).to_string().as_str(), + "TransactionAccepted", + &entity_identifier, res, &outbound_sse_data_sender, sse_event.inbound_filter, sse_event.json_data, - || SseData::DeployAccepted { deploy }, + || 
SseData::TransactionAccepted(transaction), ) .await; } - SseData::DeployExpired { deploy_hash } => { + SseData::TransactionExpired { transaction_hash } => { + let transaction_expired = TransactionExpired::new(transaction_hash); + let entity_identifier = transaction_expired.identifier(); if enable_event_logging { - let hex_deploy_hash = HexFmt(deploy_hash.inner()); - info!("Deploy Expired: {:18}", hex_deploy_hash); - debug!("Deploy Expired: {}", hex_deploy_hash); + info!("Transaction Expired: {:18}", entity_identifier); + debug!("Transaction Expired: {}", entity_identifier); } count_internal_event("main_inbound_sse_data", "db_save_start"); let res = database - .save_deploy_expired( - DeployExpired::new(deploy_hash), + .save_transaction_expired( + transaction_expired, sse_event.id, sse_event.source.to_string(), sse_event.api_version, ) .await; handle_database_save_result( - "DeployExpired", - HexFmt(deploy_hash.inner()).to_string().as_str(), + "TransactionExpired", + &entity_identifier, res, &outbound_sse_data_sender, sse_event.inbound_filter, sse_event.json_data, - || SseData::DeployExpired { deploy_hash }, + || SseData::TransactionExpired { transaction_hash }, ) .await; } - SseData::DeployProcessed { - deploy_hash, - account, + SseData::TransactionProcessed { + transaction_hash, + initiator_addr, timestamp, ttl, - dependencies, block_hash, execution_result, + messages, } => { - if enable_event_logging { - let hex_deploy_hash = HexFmt(deploy_hash.inner()); - info!("Deploy Processed: {:18}", hex_deploy_hash); - debug!("Deploy Processed: {}", hex_deploy_hash); - } - let deploy_processed = DeployProcessed::new( - deploy_hash.clone(), - account.clone(), + //TODO fix all these clones + let transaction_processed = TransactionProcessed::new( + transaction_hash.clone(), + initiator_addr.clone(), timestamp, ttl, - dependencies.clone(), block_hash.clone(), execution_result.clone(), + messages.clone(), ); + let entity_identifier = transaction_processed.identifier(); + if enable_event_logging { + info!("Transaction Processed: {:18}", entity_identifier); + debug!("Transaction Processed: {}", entity_identifier); + } count_internal_event("main_inbound_sse_data", "db_save_start"); let res = database - .save_deploy_processed( - deploy_processed.clone(), + .save_transaction_processed( + transaction_processed, sse_event.id, sse_event.source.to_string(), sse_event.api_version, ) .await; - handle_database_save_result( - "DeployProcessed", - HexFmt(deploy_hash.inner()).to_string().as_str(), + "TransactionProcessed", + &entity_identifier, res, &outbound_sse_data_sender, sse_event.inbound_filter, sse_event.json_data, - || SseData::DeployProcessed { - deploy_hash, - account, + || SseData::TransactionProcessed { + transaction_hash, + initiator_addr, timestamp, ttl, - dependencies, block_hash, execution_result, + messages, }, ) .await; @@ -543,9 +545,9 @@ async fn handle_single_event { - let step = Step::new(era_id, execution_effect.clone()); + let step = Step::new(era_id, execution_effects.clone()); if enable_event_logging { info!("Step at era: {}", era_id.value()); } @@ -567,7 +569,7 @@ async fn handle_single_event>, - version: casper_types::ProtocolVersion, + version: ProtocolVersion, outbound_sse_data_sender: &Sender<(SseData, Option, Option)>, filter: Filter, enable_event_logging: bool, @@ -704,10 +706,7 @@ fn handle_events_in_thread HashMap, Receiver)> { let mut map = HashMap::new(); - map.insert(Filter::Deploys, mpsc_channel(cache_size)); map.insert(Filter::Events, mpsc_channel(cache_size)); - 
map.insert(Filter::Main, mpsc_channel(cache_size)); - map.insert(Filter::Sigs, mpsc_channel(cache_size)); map } diff --git a/event_sidecar/src/rest_server/filters.rs b/event_sidecar/src/rest_server/filters.rs index e10435e7..8a5f16cd 100644 --- a/event_sidecar/src/rest_server/filters.rs +++ b/event_sidecar/src/rest_server/filters.rs @@ -1,11 +1,37 @@ use super::{errors::handle_rejection, handlers, openapi::build_open_api_filters}; use crate::{ - types::database::DatabaseReader, + types::database::{DatabaseReader, TransactionTypeId}, utils::{root_filter, InvalidPath}, }; -use std::convert::Infallible; +use std::{convert::Infallible, str::FromStr}; use warp::Filter; +pub enum TransactionTypeIdFilter { + Deploy, + Version1, +} + +impl From for TransactionTypeId { + fn from(val: TransactionTypeIdFilter) -> Self { + match val { + TransactionTypeIdFilter::Deploy => TransactionTypeId::Deploy, + TransactionTypeIdFilter::Version1 => TransactionTypeId::Version1, + } + } +} + +impl FromStr for TransactionTypeIdFilter { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "deploy" => Ok(TransactionTypeIdFilter::Deploy), + "version1" => Ok(TransactionTypeIdFilter::Version1), + _ => Err(format!("Invalid transaction type id: {}", s)), + } + } +} + /// Helper function to specify available filters. /// Input: the database with data to be filtered. /// Return: the filtered data. @@ -15,7 +41,7 @@ pub(super) fn combined_filters( root_filter() .or(root_and_invalid_path()) .or(block_filters(db.clone())) - .or(deploy_filters(db.clone())) + .or(transaction_filters(db.clone())) .or(step_by_era(db.clone())) .or(faults_by_public_key(db.clone())) .or(faults_by_era(db.clone())) @@ -46,16 +72,16 @@ fn block_filters( .or(block_by_height(db)) } -/// Helper function to specify available filters for deploy information. +/// Helper function to specify available filters for transaction information. /// Input: the database with data to be filtered. /// Return: the filtered data. -fn deploy_filters( +fn transaction_filters( db: Db, ) -> impl Filter + Clone { - deploy_by_hash(db.clone()) - .or(deploy_accepted_by_hash(db.clone())) - .or(deploy_processed_by_hash(db.clone())) - .or(deploy_expired_by_hash(db)) + transaction_by_hash(db.clone()) + .or(transaction_accepted_by_hash(db.clone())) + .or(transaction_processed_by_hash(db.clone())) + .or(transaction_expired_by_hash(db)) } /// Return information about the last block added to the linear chain. @@ -127,101 +153,101 @@ fn block_by_height( .and_then(handlers::get_block_by_height) } -/// Return an aggregate of the different states for the given deploy. This is a synthetic JSON not emitted by the node. -/// The output differs depending on the deploy's status, which changes over time as the deploy goes through its lifecycle. +/// Return an aggregate of the different states for the given transaction. This is a synthetic JSON not emitted by the node. +/// The output differs depending on the transaction's status, which changes over time as the transaction goes through its lifecycle. /// Input: the database with data to be filtered. -/// Return: data about the deploy specified. -/// Path URL: deploy/ -/// Example: curl http://127.0.0.1:18888/deploy/f01544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a +/// Return: data about the transaction specified. 
+/// Path URL: transaction/ +/// Example: curl http://127.0.0.1:18888/transaction/f01544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a #[utoipa::path( get, - path = "/deploy/{deploy_hash}", + path = "/transaction/{transaction_hash}", params( - ("deploy_hash" = String, Path, description = "Base64 encoded deploy hash of requested deploy") + ("transaction_hash" = String, Path, description = "Base64 encoded transaction hash of requested transaction") ), responses( - (status = 200, description = "fetch aggregate data for deploy events", body = DeployAggregate) + (status = 200, description = "fetch aggregate data for transaction events", body = TransactionAggregate) ) )] -fn deploy_by_hash( +fn transaction_by_hash( db: Db, ) -> impl Filter + Clone { - warp::path!("deploy" / String) + warp::path!("transaction" / TransactionTypeIdFilter / String) .and(warp::get()) .and(with_db(db)) - .and_then(handlers::get_deploy_by_hash) + .and_then(handlers::get_transaction_by_identifier) } -/// Return information about an accepted deploy given its deploy hash. +/// Return information about an accepted transaction given its transaction hash. /// Input: the database with data to be filtered. -/// Return: data about the accepted deploy. -/// Path URL: deploy/accepted/ -/// Example: curl http://127.0.0.1:18888/deploy/accepted/f01544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a +/// Return: data about the accepted transaction. +/// Path URL: transaction/accepted/ +/// Example: curl http://127.0.0.1:18888/transaction/accepted/f01544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a #[utoipa::path( get, - path = "/deploy/accepted/{deploy_hash}", + path = "/transaction/accepted/{transaction_hash}", params( - ("deploy_hash" = String, Path, description = "Base64 encoded deploy hash of requested deploy accepted") + ("transaction_hash" = String, Path, description = "Base64 encoded transaction hash of requested transaction accepted") ), responses( - (status = 200, description = "fetch stored deploy", body = DeployAccepted) + (status = 200, description = "fetch stored transaction", body = TransactionAccepted) ) )] -fn deploy_accepted_by_hash( +fn transaction_accepted_by_hash( db: Db, ) -> impl Filter + Clone { - warp::path!("deploy" / "accepted" / String) + warp::path!("transaction" / TransactionTypeIdFilter / "accepted" / String) .and(warp::get()) .and(with_db(db)) - .and_then(handlers::get_deploy_accepted_by_hash) + .and_then(handlers::get_transaction_accepted_by_hash) } #[utoipa::path( get, - path = "/deploy/expired/{deploy_hash}", + path = "/transaction/expired/{transaction_hash}", params( - ("deploy_hash" = String, Path, description = "Base64 encoded deploy hash of requested deploy expired") + ("transaction_hash" = String, Path, description = "Base64 encoded transaction hash of requested transaction expired") ), responses( - (status = 200, description = "fetch stored deploy", body = DeployExpired) + (status = 200, description = "fetch stored transaction", body = TransactionExpired) ) )] -/// Return information about a deploy that expired given its deploy hash. +/// Return information about a transaction that expired given its transaction hash. /// Input: the database with data to be filtered. -/// Return: data about the expired deploy. -/// Path URL: deploy/expired/ -/// Example: curl http://127.0.0.1:18888/deploy/expired/e03544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a -fn deploy_expired_by_hash( +/// Return: data about the expired transaction.
+/// Path URL: transaction/expired/ +/// Example: curl http://127.0.0.1:18888/transaction/expired/e03544d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab58a +fn transaction_expired_by_hash( db: Db, ) -> impl Filter + Clone { - warp::path!("deploy" / "expired" / String) + warp::path!("transaction" / TransactionTypeIdFilter / "expired" / String) .and(warp::get()) .and(with_db(db)) - .and_then(handlers::get_deploy_expired_by_hash) + .and_then(handlers::get_transaction_expired_by_hash) } #[utoipa::path( get, - path = "/deploy/processed/{deploy_hash}", + path = "/transaction/processed/{transaction_hash}", params( - ("deploy_hash" = String, Path, description = "Base64 encoded deploy hash of requested deploy processed") + ("transaction_hash" = String, Path, description = "Base64 encoded transaction hash of requested transaction processed") ), responses( - (status = 200, description = "fetch stored deploy", body = DeployProcessed) + (status = 200, description = "fetch stored transaction", body = TransactionProcessed) ) )] -/// Return information about a deploy that was processed given its deploy hash. +/// Return information about a transaction that was processed given its transaction hash. /// Input: the database with data to be filtered. -/// Return: data about the processed deploy. -/// Path URL: deploy/processed/ -/// Example: curl http://127.0.0.1:18888/deploy/processed/f08944d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab77a -fn deploy_processed_by_hash( +/// Return: data about the processed transaction. +/// Path URL: transaction/processed/ +/// Example: curl http://127.0.0.1:18888/transaction/processed/f08944d37354c5f9b2c4956826d32f8e44198f94fb6752e87f422fe3071ab77a +fn transaction_processed_by_hash( db: Db, ) -> impl Filter + Clone { - warp::path!("deploy" / "processed" / String) + warp::path!("transaction" / TransactionTypeIdFilter / "processed" / String) .and(warp::get()) .and(with_db(db)) - .and_then(handlers::get_deploy_processed_by_hash) + .and_then(handlers::get_transaction_processed_by_hash) } #[utoipa::path( diff --git a/event_sidecar/src/rest_server/handlers.rs b/event_sidecar/src/rest_server/handlers.rs index d4e0ce1a..28bd3310 100644 --- a/event_sidecar/src/rest_server/handlers.rs +++ b/event_sidecar/src/rest_server/handlers.rs @@ -1,4 +1,4 @@ -use super::errors::StorageError; +use super::{errors::StorageError, filters::TransactionTypeIdFilter}; use crate::{ rest_server::errors::InvalidParam, types::database::{DatabaseReadError, DatabaseReader}, @@ -32,39 +32,51 @@ pub(super) async fn get_block_by_height( format_or_reject_storage_result(db_result) } -pub(super) async fn get_deploy_by_hash( +pub(super) async fn get_transaction_by_identifier( + transaction_type: TransactionTypeIdFilter, hash: String, db: Db, ) -> Result { check_hash_is_correct_format(&hash)?; - let db_result = db.get_deploy_aggregate_by_hash(&hash).await; + let db_result = db + .get_transaction_aggregate_by_identifier(&transaction_type.into(), &hash) + .await; format_or_reject_storage_result(db_result) } -pub(super) async fn get_deploy_accepted_by_hash( +pub(super) async fn get_transaction_accepted_by_hash( + transaction_type: TransactionTypeIdFilter, hash: String, db: Db, ) -> Result { check_hash_is_correct_format(&hash)?; - let db_result = db.get_deploy_accepted_by_hash(&hash).await; + let db_result = db + .get_transaction_accepted_by_hash(&transaction_type.into(), &hash) + .await; format_or_reject_storage_result(db_result) } -pub(super) async fn get_deploy_processed_by_hash( +pub(super) async fn 
get_transaction_processed_by_hash( + transaction_type: TransactionTypeIdFilter, hash: String, db: Db, ) -> Result { check_hash_is_correct_format(&hash)?; - let db_result = db.get_deploy_processed_by_hash(&hash).await; + let db_result = db + .get_transaction_processed_by_hash(&transaction_type.into(), &hash) + .await; format_or_reject_storage_result(db_result) } -pub(super) async fn get_deploy_expired_by_hash( +pub(super) async fn get_transaction_expired_by_hash( + transaction_type: TransactionTypeIdFilter, hash: String, db: Db, ) -> Result { check_hash_is_correct_format(&hash)?; - let db_result = db.get_deploy_expired_by_hash(&hash).await; + let db_result = db + .get_transaction_expired_by_hash(&transaction_type.into(), &hash) + .await; format_or_reject_storage_result(db_result) } diff --git a/event_sidecar/src/rest_server/openapi.rs b/event_sidecar/src/rest_server/openapi.rs index b76f0ee7..c6a774d4 100644 --- a/event_sidecar/src/rest_server/openapi.rs +++ b/event_sidecar/src/rest_server/openapi.rs @@ -1,20 +1,11 @@ mod schema_transformation_visitor; use crate::types::{ - database::DeployAggregate, - sse_events::{BlockAdded, DeployAccepted, DeployExpired, DeployProcessed, Fault, Step}, -}; -use casper_event_types::{ - block::json_compatibility::{ - JsonBlockBody, JsonBlockHeader, JsonEraEnd, JsonEraReport, JsonProof, Reward, - ValidatorWeight, + database::TransactionAggregate, + sse_events::{ + BlockAdded, Fault, Step, TransactionAccepted, TransactionExpired, TransactionProcessed, }, - deploy::{Approval, DeployHeader}, - BlockHash, Deploy, DeployHash, Digest, ExecutableDeployItem, FinalitySignature, JsonBlock, -}; -use casper_types::{ - ContractHash, ContractPackageHash, ContractVersion, ExecutionEffect, ExecutionResult, - RuntimeArgs, }; +use casper_types::RuntimeArgs; use http::Uri; use schemars::{schema::SchemaObject, schema_for, visit::Visitor}; use serde::{Deserialize, Serialize}; @@ -38,10 +29,10 @@ use self::schema_transformation_visitor::SchemaTransformationVisitor; paths(crate::rest_server::filters::latest_block, crate::rest_server::filters::block_by_hash, crate::rest_server::filters::block_by_height, - crate::rest_server::filters::deploy_by_hash, - crate::rest_server::filters::deploy_accepted_by_hash, - crate::rest_server::filters::deploy_expired_by_hash, - crate::rest_server::filters::deploy_processed_by_hash, + crate::rest_server::filters::transaction_by_hash, + crate::rest_server::filters::transaction_accepted_by_hash, + crate::rest_server::filters::transaction_expired_by_hash, + crate::rest_server::filters::transaction_processed_by_hash, crate::rest_server::filters::faults_by_public_key, crate::rest_server::filters::faults_by_era, crate::rest_server::filters::finality_signatures_by_block, @@ -50,7 +41,7 @@ use self::schema_transformation_visitor::SchemaTransformationVisitor; ), components( - schemas(Step, FinalitySignature, Fault, DeployExpired, Deploy, DeployHeader, ExecutableDeployItem, Approval, DeployAggregate, DeployAccepted, DeployProcessed, BlockAdded, JsonBlock, BlockHash, JsonEraEnd, JsonEraReport, JsonBlockBody, JsonBlockHeader, JsonProof, Digest, DeployHash, ValidatorWeight, Reward) + schemas(Step, Fault, TransactionExpired, TransactionAggregate, TransactionAccepted, TransactionProcessed, BlockAdded) ), tags( (name = "event-sidecar", description = "Event-sidecar rest API") @@ -89,15 +80,15 @@ pub fn build_open_api_filters( extend_open_api_with_schemars_schemas( &mut components, vec![ - ("ExecutionResult".to_string(), schema_for!(ExecutionResult)), + 
//("ExecutionResult".to_string(), schema_for!(ExecutionResult)), ("RuntimeArgs".to_string(), schema_for!(RuntimeArgs)), - ("ContractHash".to_string(), schema_for!(ContractHash)), - ( + //("ContractHash".to_string(), schema_for!(ContractHash)), + /*( "ContractPackageHash".to_string(), schema_for!(ContractPackageHash), ), ("ContractVersion".to_string(), schema_for!(ContractVersion)), - ("ExecutionEffect".to_string(), schema_for!(ExecutionEffect)), + ("ExecutionEffect".to_string(), schema_for!(ExecutionEffect)),*/ ], ); doc.components = Some(components); diff --git a/event_sidecar/src/rest_server/tests.rs b/event_sidecar/src/rest_server/tests.rs index 9c405104..9d1dd393 100644 --- a/event_sidecar/src/rest_server/tests.rs +++ b/event_sidecar/src/rest_server/tests.rs @@ -1,17 +1,17 @@ -use casper_event_types::FinalitySignature as FinSig; use casper_types::AsymmetricType; +use casper_types::FinalitySignature as FinSig; use http::StatusCode; use warp::test::request; use super::filters; use crate::{ testing::fake_database::FakeDatabase, - types::{database::DeployAggregate, sse_events::*}, + types::{database::TransactionAggregate, sse_events::*}, }; // Path elements const BLOCK: &str = "block"; -const DEPLOY: &str = "deploy"; +const TRANSACTION: &str = "transaction"; const FAULTS: &str = "faults"; const SIGNATURES: &str = "signatures"; const STEP: &str = "step"; @@ -43,9 +43,13 @@ async fn root_should_return_400() { #[tokio::test] async fn root_with_invalid_path_should_return_400() { - should_respond_to_path_with("/not_block_or_deploy".to_string(), StatusCode::BAD_REQUEST).await; should_respond_to_path_with( - "/not_block_or_deploy/extra".to_string(), + "/not_block_or_transaction".to_string(), + StatusCode::BAD_REQUEST, + ) + .await; + should_respond_to_path_with( + "/not_block_or_transaction/extra".to_string(), StatusCode::BAD_REQUEST, ) .await; @@ -118,7 +122,7 @@ async fn block_by_height_should_return_valid_data() { } #[tokio::test] -async fn deploy_by_hash_should_return_valid_data() { +async fn transaction_by_hash_should_return_valid_data() { let database = FakeDatabase::new(); let identifiers = database @@ -128,24 +132,22 @@ async fn deploy_by_hash_should_return_valid_data() { let api = filters::combined_filters(database); - let request_path = format!("/{}/{}", DEPLOY, identifiers.deploy_accepted_hash); + let (transaction_hash, transaction_type) = identifiers.transaction_accepted_info; + let request_path = format!("/{}/{}/{}", TRANSACTION, transaction_type, transaction_hash); let response = request().path(&request_path).reply(&api).await; assert!(response.status().is_success()); let body = response.into_body(); - let deploy_aggregate = serde_json::from_slice::(&body) - .expect("Error parsing AggregateDeployInfo from response"); + let transaction_aggregate = serde_json::from_slice::(&body) + .expect("Error parsing AggregateTransactionInfo from response"); - assert_eq!( - deploy_aggregate.deploy_hash, - identifiers.deploy_accepted_hash - ); + assert_eq!(transaction_aggregate.transaction_hash, transaction_hash); } #[tokio::test] -async fn deploy_accepted_by_hash_should_return_valid_data() { +async fn transaction_accepted_by_hash_should_return_valid_data() { let database = FakeDatabase::new(); let identifiers = database @@ -155,9 +157,10 @@ async fn deploy_accepted_by_hash_should_return_valid_data() { let api = filters::combined_filters(database); + let (transaction_hash, transaction_type) = identifiers.transaction_accepted_info; let request_path = format!( - "/{}/{}/{}", - DEPLOY, ACCEPTED, 
identifiers.deploy_accepted_hash + "/{}/{}/{}/{}", + TRANSACTION, transaction_type, ACCEPTED, transaction_hash ); let response = request().path(&request_path).reply(&api).await; @@ -165,17 +168,14 @@ async fn deploy_accepted_by_hash_should_return_valid_data() { assert!(response.status().is_success()); let body = response.into_body(); - let deploy_accepted = serde_json::from_slice::(&body) - .expect("Error parsing DeployAccepted from response"); + let transaction_accepted = serde_json::from_slice::(&body) + .expect("Error parsing TransactionAccepted from response"); - assert_eq!( - deploy_accepted.hex_encoded_hash(), - identifiers.deploy_accepted_hash - ); + assert_eq!(transaction_accepted.hex_encoded_hash(), transaction_hash); } #[tokio::test] -async fn deploy_processed_by_hash_should_return_valid_data() { +async fn transaction_processed_by_hash_should_return_valid_data() { let database = FakeDatabase::new(); let identifiers = database @@ -184,10 +184,10 @@ async fn deploy_processed_by_hash_should_return_valid_data() { .expect("Error populating FakeDatabase"); let api = filters::combined_filters(database); - + let (transaction_hash, transaction_type) = identifiers.transaction_processed_info; let request_path = format!( - "/{}/{}/{}", - DEPLOY, PROCESSED, identifiers.deploy_processed_hash + "/{}/{}/{}/{}", + TRANSACTION, transaction_type, PROCESSED, transaction_hash ); let response = request().path(&request_path).reply(&api).await; @@ -195,17 +195,14 @@ async fn deploy_processed_by_hash_should_return_valid_data() { assert!(response.status().is_success()); let body = response.into_body(); - let deploy_processed = serde_json::from_slice::(&body) - .expect("Error parsing DeployProcessed from response"); + let transaction_processed = serde_json::from_slice::(&body) + .expect("Error parsing TransactionProcessed from response"); - assert_eq!( - deploy_processed.hex_encoded_hash(), - identifiers.deploy_processed_hash - ); + assert_eq!(transaction_processed.hex_encoded_hash(), transaction_hash); } #[tokio::test] -async fn deploy_expired_by_hash_should_return_valid_data() { +async fn transaction_expired_by_hash_should_return_valid_data() { let database = FakeDatabase::new(); let identifiers = database @@ -214,10 +211,10 @@ async fn deploy_expired_by_hash_should_return_valid_data() { .expect("Error populating FakeDatabase"); let api = filters::combined_filters(database); - + let (transaction_hash, transaction_type) = identifiers.transaction_expired_info; let request_path = format!( - "/{}/{}/{}", - DEPLOY, EXPIRED, identifiers.deploy_expired_hash + "/{}/{}/{}/{}", + TRANSACTION, transaction_type, EXPIRED, transaction_hash ); let response = request().path(&request_path).reply(&api).await; @@ -225,13 +222,10 @@ async fn deploy_expired_by_hash_should_return_valid_data() { assert!(response.status().is_success()); let body = response.into_body(); - let deploy_expired = serde_json::from_slice::(&body) - .expect("Error parsing DeployExpired from response"); + let transaction_expired = serde_json::from_slice::(&body) + .expect("Error parsing TransactionExpired from response"); - assert_eq!( - deploy_expired.hex_encoded_hash(), - identifiers.deploy_expired_hash - ); + assert_eq!(transaction_expired.hex_encoded_hash(), transaction_hash); } #[tokio::test] @@ -350,29 +344,28 @@ async fn block_by_height_of_not_stored_should_return_404() { } #[tokio::test] -async fn deploy_by_hash_of_not_stored_should_return_404() { - let request_path = format!("/{}/{}", DEPLOY, VALID_HASH); +async fn 
transaction_by_hash_of_not_stored_should_return_404() { + let request_path = format!("/{}/deploy/{}", TRANSACTION, VALID_HASH); should_respond_to_path_with(request_path, StatusCode::NOT_FOUND).await } #[tokio::test] -async fn deploy_accepted_by_hash_of_not_stored_should_return_404() { - let request_path = format!("/{}/{}/{}", DEPLOY, ACCEPTED, VALID_HASH); - +async fn transaction_accepted_by_hash_of_not_stored_should_return_404() { + let request_path = format!("/{}/version1/{}/{}", TRANSACTION, ACCEPTED, VALID_HASH); should_respond_to_path_with(request_path, StatusCode::NOT_FOUND).await } #[tokio::test] -async fn deploy_processed_by_hash_of_not_stored_should_return_404() { - let request_path = format!("/{}/{}/{}", DEPLOY, PROCESSED, VALID_HASH); +async fn transaction_processed_by_hash_of_not_stored_should_return_404() { + let request_path = format!("/{}/deploy/{}/{}", TRANSACTION, PROCESSED, VALID_HASH); should_respond_to_path_with(request_path, StatusCode::NOT_FOUND).await } #[tokio::test] -async fn deploy_expired_by_hash_of_not_stored_should_return_404() { - let request_path = format!("/{}/{}/{}", DEPLOY, EXPIRED, VALID_HASH); +async fn transaction_expired_by_hash_of_not_stored_should_return_404() { + let request_path = format!("/{}/deploy/{}/{}", TRANSACTION, EXPIRED, VALID_HASH); should_respond_to_path_with(request_path, StatusCode::NOT_FOUND).await } @@ -413,29 +406,29 @@ async fn block_by_invalid_hash_should_return_400() { } #[tokio::test] -async fn deploy_by_hash_of_invalid_should_return_400() { - let request_path = format!("/{}/{}", DEPLOY, INVALID_HASH); +async fn transaction_by_hash_of_invalid_should_return_400() { + let request_path = format!("/{}/{}", TRANSACTION, INVALID_HASH); should_respond_to_path_with(request_path, StatusCode::BAD_REQUEST).await } #[tokio::test] -async fn deploy_accepted_by_hash_of_invalid_should_return_400() { - let request_path = format!("/{}/{}/{}", DEPLOY, ACCEPTED, INVALID_HASH); +async fn transaction_accepted_by_hash_of_invalid_should_return_400() { + let request_path = format!("/{}/{}/{}", TRANSACTION, ACCEPTED, INVALID_HASH); should_respond_to_path_with(request_path, StatusCode::BAD_REQUEST).await } #[tokio::test] -async fn deploy_processed_by_hash_of_invalid_should_return_400() { - let request_path = format!("/{}/{}/{}", DEPLOY, PROCESSED, INVALID_HASH); +async fn transaction_processed_by_hash_of_invalid_should_return_400() { + let request_path = format!("/{}/{}/{}", TRANSACTION, PROCESSED, INVALID_HASH); should_respond_to_path_with(request_path, StatusCode::BAD_REQUEST).await } #[tokio::test] -async fn deploy_expired_by_hash_of_invalid_should_return_400() { - let request_path = format!("/{}/{}/{}", DEPLOY, EXPIRED, INVALID_HASH); +async fn transaction_expired_by_hash_of_invalid_should_return_400() { + let request_path = format!("/{}/deploy/{}/{}", TRANSACTION, EXPIRED, INVALID_HASH); should_respond_to_path_with(request_path, StatusCode::BAD_REQUEST).await } diff --git a/event_sidecar/src/sql/tables.rs b/event_sidecar/src/sql/tables.rs index be12e0bb..03d323d6 100644 --- a/event_sidecar/src/sql/tables.rs +++ b/event_sidecar/src/sql/tables.rs @@ -1,8 +1,4 @@ pub mod block_added; -pub mod deploy_accepted; -pub mod deploy_event; -pub mod deploy_expired; -pub mod deploy_processed; pub mod event_log; pub mod event_type; pub mod fault; @@ -10,3 +6,8 @@ pub mod finality_signature; pub mod migration; pub mod shutdown; pub mod step; +pub mod transaction_accepted; +pub mod transaction_event; +pub mod transaction_expired; +pub mod transaction_processed; +pub 
mod transaction_type; diff --git a/event_sidecar/src/sql/tables/deploy_accepted.rs b/event_sidecar/src/sql/tables/deploy_accepted.rs deleted file mode 100644 index 47f62bd9..00000000 --- a/event_sidecar/src/sql/tables/deploy_accepted.rs +++ /dev/null @@ -1,71 +0,0 @@ -use sea_query::{ - error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, - InsertStatement, Query, SelectStatement, Table, TableCreateStatement, -}; - -use super::event_log::EventLog; - -#[derive(Iden)] -pub(super) enum DeployAccepted { - #[iden = "DeployAccepted"] - Table, - DeployHash, - Raw, - EventLogId, -} - -pub fn create_table_stmt() -> TableCreateStatement { - Table::create() - .table(DeployAccepted::Table) - .if_not_exists() - .col( - ColumnDef::new(DeployAccepted::DeployHash) - .string() - .not_null(), - ) - .col(ColumnDef::new(DeployAccepted::Raw).text().not_null()) - .col( - ColumnDef::new(DeployAccepted::EventLogId) - .big_unsigned() - .not_null(), - ) - .index( - Index::create() - .primary() - .name("PDX_DeployAccepted") - .col(DeployAccepted::DeployHash), - ) - .foreign_key( - ForeignKey::create() - .name("FK_event_log_id") - .from(DeployAccepted::Table, DeployAccepted::EventLogId) - .to(EventLog::Table, EventLog::EventLogId) - .on_delete(ForeignKeyAction::Restrict) - .on_update(ForeignKeyAction::Restrict), - ) - .to_owned() -} - -pub fn create_insert_stmt( - deploy_hash: String, - raw: String, - event_log_id: u64, -) -> SqResult { - Query::insert() - .into_table(DeployAccepted::Table) - .columns([ - DeployAccepted::DeployHash, - DeployAccepted::Raw, - DeployAccepted::EventLogId, - ]) - .values(vec![deploy_hash.into(), raw.into(), event_log_id.into()]) - .map(|stmt| stmt.to_owned()) -} - -pub fn create_get_by_hash_stmt(deploy_hash: String) -> SelectStatement { - Query::select() - .column(DeployAccepted::Raw) - .from(DeployAccepted::Table) - .and_where(Expr::col(DeployAccepted::DeployHash).eq(deploy_hash)) - .to_owned() -} diff --git a/event_sidecar/src/sql/tables/deploy_event.rs b/event_sidecar/src/sql/tables/deploy_event.rs deleted file mode 100644 index 84f87b79..00000000 --- a/event_sidecar/src/sql/tables/deploy_event.rs +++ /dev/null @@ -1,51 +0,0 @@ -use sea_query::{ - error::Result as SqResult, ColumnDef, ForeignKey, ForeignKeyAction, Iden, Index, - InsertStatement, Query, Table, TableCreateStatement, -}; - -use super::event_log::EventLog; - -#[derive(Iden)] -pub(super) enum DeployEvent { - Table, - EventLogId, - DeployHash, -} - -pub fn create_table_stmt() -> TableCreateStatement { - Table::create() - .table(DeployEvent::Table) - .if_not_exists() - .col( - ColumnDef::new(DeployEvent::EventLogId) - .big_unsigned() - .not_null(), - ) - .col(ColumnDef::new(DeployEvent::DeployHash).string().not_null()) - .index( - Index::create() - .primary() - .name("PDX_DeployEvent") - .col(DeployEvent::DeployHash) - .col(DeployEvent::EventLogId), - ) - .foreign_key( - ForeignKey::create() - .name("FK_event_log_id") - .from(DeployEvent::Table, DeployEvent::EventLogId) - .to(EventLog::Table, EventLog::EventLogId) - .on_delete(ForeignKeyAction::Restrict) - .on_update(ForeignKeyAction::Restrict), - ) - .to_owned() -} - -pub fn create_insert_stmt(event_log_id: u64, deploy_hash: String) -> SqResult { - let insert_stmt = Query::insert() - .into_table(DeployEvent::Table) - .columns([DeployEvent::EventLogId, DeployEvent::DeployHash]) - .values(vec![event_log_id.into(), deploy_hash.into()])? 
- .to_owned(); - - Ok(insert_stmt) -} diff --git a/event_sidecar/src/sql/tables/deploy_expired.rs b/event_sidecar/src/sql/tables/deploy_expired.rs deleted file mode 100644 index dd5ca8f5..00000000 --- a/event_sidecar/src/sql/tables/deploy_expired.rs +++ /dev/null @@ -1,71 +0,0 @@ -use sea_query::{ - error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, - InsertStatement, Query, SelectStatement, Table, TableCreateStatement, -}; - -use super::event_log::EventLog; - -#[derive(Iden)] -pub(super) enum DeployExpired { - #[iden = "DeployExpired"] - Table, - DeployHash, - Raw, - EventLogId, -} - -pub fn create_table_stmt() -> TableCreateStatement { - Table::create() - .table(DeployExpired::Table) - .if_not_exists() - .col( - ColumnDef::new(DeployExpired::DeployHash) - .string() - .not_null(), - ) - .col(ColumnDef::new(DeployExpired::Raw).text().not_null()) - .col( - ColumnDef::new(DeployExpired::EventLogId) - .big_unsigned() - .not_null(), - ) - .index( - Index::create() - .primary() - .name("PDX_DeployExpired") - .col(DeployExpired::DeployHash), - ) - .foreign_key( - ForeignKey::create() - .name("FK_event_log_id") - .from(DeployExpired::Table, DeployExpired::EventLogId) - .to(EventLog::Table, EventLog::EventLogId) - .on_delete(ForeignKeyAction::Restrict) - .on_update(ForeignKeyAction::Restrict), - ) - .to_owned() -} - -pub fn create_insert_stmt( - deploy_hash: String, - event_log_id: u64, - raw: String, -) -> SqResult { - Query::insert() - .into_table(DeployExpired::Table) - .columns([ - DeployExpired::DeployHash, - DeployExpired::EventLogId, - DeployExpired::Raw, - ]) - .values(vec![deploy_hash.into(), event_log_id.into(), raw.into()]) - .map(|stmt| stmt.to_owned()) -} - -pub fn create_get_by_hash_stmt(deploy_hash: String) -> SelectStatement { - Query::select() - .column(DeployExpired::Raw) - .from(DeployExpired::Table) - .and_where(Expr::col(DeployExpired::DeployHash).eq(deploy_hash)) - .to_owned() -} diff --git a/event_sidecar/src/sql/tables/deploy_processed.rs b/event_sidecar/src/sql/tables/deploy_processed.rs deleted file mode 100644 index 198b0cd9..00000000 --- a/event_sidecar/src/sql/tables/deploy_processed.rs +++ /dev/null @@ -1,71 +0,0 @@ -use sea_query::{ - error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, - InsertStatement, Query, SelectStatement, Table, TableCreateStatement, -}; - -use super::event_log::EventLog; - -#[derive(Iden)] -pub enum DeployProcessed { - #[iden = "DeployProcessed"] - Table, - DeployHash, - Raw, - EventLogId, -} - -pub fn create_table_stmt() -> TableCreateStatement { - Table::create() - .table(DeployProcessed::Table) - .if_not_exists() - .col( - ColumnDef::new(DeployProcessed::DeployHash) - .string() - .not_null(), - ) - .col(ColumnDef::new(DeployProcessed::Raw).text().not_null()) - .col( - ColumnDef::new(DeployProcessed::EventLogId) - .big_unsigned() - .not_null(), - ) - .index( - Index::create() - .primary() - .name("PDX_DeployProcessed") - .col(DeployProcessed::DeployHash), - ) - .foreign_key( - ForeignKey::create() - .name("FK_event_log_id") - .from(DeployProcessed::Table, DeployProcessed::EventLogId) - .to(EventLog::Table, EventLog::EventLogId) - .on_delete(ForeignKeyAction::Restrict) - .on_update(ForeignKeyAction::Restrict), - ) - .to_owned() -} - -pub fn create_insert_stmt( - deploy_hash: String, - raw: String, - event_log_id: u64, -) -> SqResult { - Query::insert() - .into_table(DeployProcessed::Table) - .columns([ - DeployProcessed::DeployHash, - DeployProcessed::Raw, - 
DeployProcessed::EventLogId, - ]) - .values(vec![deploy_hash.into(), raw.into(), event_log_id.into()]) - .map(|stmt| stmt.to_owned()) -} - -pub fn create_get_by_hash_stmt(deploy_hash: String) -> SelectStatement { - Query::select() - .column(DeployProcessed::Raw) - .from(DeployProcessed::Table) - .and_where(Expr::col(DeployProcessed::DeployHash).eq(deploy_hash)) - .to_owned() -} diff --git a/event_sidecar/src/sql/tables/event_type.rs b/event_sidecar/src/sql/tables/event_type.rs index 39326b8e..838211c0 100644 --- a/event_sidecar/src/sql/tables/event_type.rs +++ b/event_sidecar/src/sql/tables/event_type.rs @@ -13,9 +13,9 @@ pub(super) enum EventType { pub enum EventTypeId { BlockAdded = 1, - DeployAccepted = 2, - DeployExpired = 3, - DeployProcessed = 4, + TransactionAccepted = 2, + TransactionExpired = 3, + TransactionProcessed = 4, Fault = 5, FinalitySignature = 6, Step = 7, @@ -50,16 +50,16 @@ pub fn create_initialise_stmt() -> SqResult { "BlockAdded".into(), ])? .values(vec![ - (EventTypeId::DeployAccepted as u8).into(), - "DeployAccepted".into(), + (EventTypeId::TransactionAccepted as u8).into(), + "TransactionAccepted".into(), ])? .values(vec![ - (EventTypeId::DeployExpired as u8).into(), - "DeployExpired".into(), + (EventTypeId::TransactionExpired as u8).into(), + "TransactionExpired".into(), ])? .values(vec![ - (EventTypeId::DeployProcessed as u8).into(), - "DeployProcessed".into(), + (EventTypeId::TransactionProcessed as u8).into(), + "TransactionProcessed".into(), ])? .values(vec![(EventTypeId::Fault as u8).into(), "Fault".into()])? .values(vec![ @@ -82,7 +82,7 @@ pub fn create_initialise_stmt() -> SqResult { #[test] fn create_initialise_stmt_sql() { use sea_query::SqliteQueryBuilder; - let expected_sql = "INSERT INTO \"event_type\" (\"event_type_id\", \"event_type_name\") VALUES (1, 'BlockAdded'), (2, 'DeployAccepted'), (3, 'DeployExpired'), (4, 'DeployProcessed'), (5, 'Fault'), (6, 'FinalitySignature'), (7, 'Step'), (8, 'Shutdown') ON CONFLICT (\"event_type_id\") DO NOTHING"; + let expected_sql = "INSERT INTO \"event_type\" (\"event_type_id\", \"event_type_name\") VALUES (1, 'BlockAdded'), (2, 'TransactionAccepted'), (3, 'TransactionExpired'), (4, 'TransactionProcessed'), (5, 'Fault'), (6, 'FinalitySignature'), (7, 'Step'), (8, 'Shutdown') ON CONFLICT (\"event_type_id\") DO NOTHING"; let got_sql = create_initialise_stmt() .unwrap() diff --git a/event_sidecar/src/sql/tables/transaction_accepted.rs b/event_sidecar/src/sql/tables/transaction_accepted.rs new file mode 100644 index 00000000..c181a692 --- /dev/null +++ b/event_sidecar/src/sql/tables/transaction_accepted.rs @@ -0,0 +1,112 @@ +use sea_query::{ + error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, + InsertStatement, Query, SelectStatement, Table, TableCreateStatement, +}; + +use super::{event_log::EventLog, transaction_type::TransactionType}; + +#[derive(Iden)] +pub(super) enum TransactionAccepted { + #[iden = "TransactionAccepted"] + Table, + TransactionHash, + TransactionTypeId, + Raw, + EventLogId, +} + +pub fn create_table_stmt() -> TableCreateStatement { + Table::create() + .table(TransactionAccepted::Table) + .if_not_exists() + .col( + ColumnDef::new(TransactionAccepted::TransactionHash) + .string() + .not_null(), + ) + .col( + ColumnDef::new(TransactionAccepted::TransactionTypeId) + .tiny_unsigned() + .not_null(), + ) + .col(ColumnDef::new(TransactionAccepted::Raw).text().not_null()) + .col( + ColumnDef::new(TransactionAccepted::EventLogId) + .big_unsigned() + .not_null(), + ) + 
.index( + &mut primary_key(), + ) + .foreign_key( + &mut event_log_fk(), + ) + .foreign_key( + &mut transaction_type_fk(), + ) + .to_owned() +} + +fn transaction_type_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_transaction_type_id") + .from( + TransactionAccepted::Table, + TransactionAccepted::TransactionTypeId, + ) + .to(TransactionType::Table, TransactionType::TransactionTypeId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn event_log_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_event_log_id") + .from(TransactionAccepted::Table, TransactionAccepted::EventLogId) + .to(EventLog::Table, EventLog::EventLogId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn primary_key() -> sea_query::IndexCreateStatement { + Index::create() + .name("PDX_TransactionAccepted") + .col(TransactionAccepted::TransactionTypeId) + .col(TransactionAccepted::TransactionHash) + .primary() + .to_owned() +} + +pub fn create_insert_stmt( + transaction_type: u8, + transaction_hash: String, + raw: String, + event_log_id: u64, +) -> SqResult { + Query::insert() + .into_table(TransactionAccepted::Table) + .columns([ + TransactionAccepted::TransactionTypeId, + TransactionAccepted::TransactionHash, + TransactionAccepted::Raw, + TransactionAccepted::EventLogId, + ]) + .values(vec![ + transaction_type.into(), + transaction_hash.into(), + raw.into(), + event_log_id.into(), + ]) + .map(|stmt| stmt.to_owned()) +} + +pub fn create_get_by_hash_stmt(transaction_type: u8, transaction_hash: String) -> SelectStatement { + Query::select() + .column(TransactionAccepted::Raw) + .from(TransactionAccepted::Table) + .and_where(Expr::col(TransactionAccepted::TransactionTypeId).eq(transaction_type)) + .and_where(Expr::col(TransactionAccepted::TransactionHash).eq(transaction_hash)) + .to_owned() +} diff --git a/event_sidecar/src/sql/tables/transaction_event.rs b/event_sidecar/src/sql/tables/transaction_event.rs new file mode 100644 index 00000000..f42fc97a --- /dev/null +++ b/event_sidecar/src/sql/tables/transaction_event.rs @@ -0,0 +1,97 @@ +use sea_query::{ + error::Result as SqResult, ColumnDef, ForeignKey, ForeignKeyAction, Iden, Index, + InsertStatement, Query, Table, TableCreateStatement, +}; + +use super::{event_log::EventLog, transaction_type::TransactionType}; + +#[derive(Iden)] +pub(super) enum TransactionEvent { + Table, + EventLogId, + TransactionTypeId, + TransactionHash, +} + +pub fn create_table_stmt() -> TableCreateStatement { + Table::create() + .table(TransactionEvent::Table) + .if_not_exists() + .col( + ColumnDef::new(TransactionEvent::EventLogId) + .big_unsigned() + .not_null(), + ) + .col( + ColumnDef::new(TransactionEvent::TransactionHash) + .string() + .not_null(), + ) + .col( + ColumnDef::new(TransactionEvent::TransactionTypeId) + .tiny_unsigned() + .not_null(), + ) + .index( + &mut primary_key(), + ) + .foreign_key( + &mut event_log_fk(), + ) + .foreign_key( + &mut transaction_type_fk(), + ) + .to_owned() +} + +fn transaction_type_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_transaction_type_id") + .from(TransactionEvent::Table, TransactionEvent::TransactionTypeId) + .to(TransactionType::Table, TransactionType::TransactionTypeId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn event_log_fk() -> sea_query::ForeignKeyCreateStatement { + 
ForeignKey::create() + .name("FK_event_log_id") + .from(TransactionEvent::Table, TransactionEvent::EventLogId) + .to(EventLog::Table, EventLog::EventLogId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn primary_key() -> sea_query::IndexCreateStatement { + Index::create() + .primary() + .name("PDX_TransactionEvent") + .col(TransactionEvent::TransactionHash) + .col(TransactionEvent::TransactionTypeId) + .col(TransactionEvent::EventLogId) + .to_owned() +} + +pub fn create_insert_stmt( + event_log_id: u64, + transaction_type: u8, + transaction_hash: String, +) -> SqResult { + let insert_stmt = Query::insert() + .into_table(TransactionEvent::Table) + .columns([ + TransactionEvent::TransactionTypeId, + TransactionEvent::EventLogId, + TransactionEvent::TransactionHash, + ]) + .values(vec![ + transaction_type.into(), + event_log_id.into(), + transaction_hash.into(), + ])? + .to_owned(); + + Ok(insert_stmt) +} diff --git a/event_sidecar/src/sql/tables/transaction_expired.rs b/event_sidecar/src/sql/tables/transaction_expired.rs new file mode 100644 index 00000000..ca36ffd9 --- /dev/null +++ b/event_sidecar/src/sql/tables/transaction_expired.rs @@ -0,0 +1,111 @@ +use sea_query::{ + error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, + InsertStatement, Query, SelectStatement, Table, TableCreateStatement, +}; + +use super::{event_log::EventLog, transaction_type::TransactionType}; + +#[derive(Iden)] +pub(super) enum TransactionExpired { + #[iden = "TransactionExpired"] + Table, + TransactionTypeId, + TransactionHash, + Raw, + EventLogId, +} + +pub fn create_table_stmt() -> TableCreateStatement { + Table::create() + .table(TransactionExpired::Table) + .if_not_exists() + .col( + ColumnDef::new(TransactionExpired::TransactionHash) + .string() + .not_null(), + ) + .col( + ColumnDef::new(TransactionExpired::TransactionTypeId) + .tiny_unsigned() + .not_null(), + ) + .col(ColumnDef::new(TransactionExpired::Raw).text().not_null()) + .col( + ColumnDef::new(TransactionExpired::EventLogId) + .big_unsigned() + .not_null(), + ) + .index( + &mut primary_key(), + ) + .foreign_key( + &mut event_log_fk(), + ) + .foreign_key( + &mut transaction_type_fk(), + ) + .to_owned() +} + +fn transaction_type_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_transaction_type_id") + .from( + TransactionExpired::Table, + TransactionExpired::TransactionTypeId, + ) + .to(TransactionType::Table, TransactionType::TransactionTypeId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn event_log_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_event_log_id") + .from(TransactionExpired::Table, TransactionExpired::EventLogId) + .to(EventLog::Table, EventLog::EventLogId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn primary_key() -> sea_query::IndexCreateStatement { + Index::create() + .primary() + .name("PDX_TransactionExpired") + .col(TransactionExpired::TransactionHash) + .to_owned() +} + +pub fn create_insert_stmt( + transaction_type: u8, + transaction_hash: String, + event_log_id: u64, + raw: String, +) -> SqResult { + Query::insert() + .into_table(TransactionExpired::Table) + .columns([ + TransactionExpired::TransactionTypeId, + TransactionExpired::TransactionHash, + TransactionExpired::EventLogId, + TransactionExpired::Raw, + ]) + .values(vec![ + transaction_type.into(), 
+ transaction_hash.into(), + event_log_id.into(), + raw.into(), + ]) + .map(|stmt| stmt.to_owned()) +} + +pub fn create_get_by_hash_stmt(transaction_type: u8, transaction_hash: String) -> SelectStatement { + Query::select() + .column(TransactionExpired::Raw) + .from(TransactionExpired::Table) + .and_where(Expr::col(TransactionExpired::TransactionTypeId).eq(transaction_type)) + .and_where(Expr::col(TransactionExpired::TransactionHash).eq(transaction_hash)) + .to_owned() +} diff --git a/event_sidecar/src/sql/tables/transaction_processed.rs b/event_sidecar/src/sql/tables/transaction_processed.rs new file mode 100644 index 00000000..dc628bfe --- /dev/null +++ b/event_sidecar/src/sql/tables/transaction_processed.rs @@ -0,0 +1,115 @@ +use sea_query::{ + error::Result as SqResult, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Index, + InsertStatement, Query, SelectStatement, Table, TableCreateStatement, +}; + +use super::{event_log::EventLog, transaction_type::TransactionType}; + +#[derive(Iden)] +pub enum TransactionProcessed { + #[iden = "TransactionProcessed"] + Table, + TransactionHash, + TransactionTypeId, + Raw, + EventLogId, +} + +pub fn create_table_stmt() -> TableCreateStatement { + Table::create() + .table(TransactionProcessed::Table) + .if_not_exists() + .col( + ColumnDef::new(TransactionProcessed::TransactionHash) + .string() + .not_null(), + ) + .col( + ColumnDef::new(TransactionProcessed::TransactionTypeId) + .tiny_unsigned() + .not_null(), + ) + .col(ColumnDef::new(TransactionProcessed::Raw).text().not_null()) + .col( + ColumnDef::new(TransactionProcessed::EventLogId) + .big_unsigned() + .not_null(), + ) + .index( + &mut primary_key(), + ) + .foreign_key( + &mut event_log_fk(), + ) + .foreign_key( + &mut transaction_type_fk(), + ) + .to_owned() +} + +fn transaction_type_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_transaction_type_id") + .from( + TransactionProcessed::Table, + TransactionProcessed::TransactionTypeId, + ) + .to(TransactionType::Table, TransactionType::TransactionTypeId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn event_log_fk() -> sea_query::ForeignKeyCreateStatement { + ForeignKey::create() + .name("FK_event_log_id") + .from( + TransactionProcessed::Table, + TransactionProcessed::EventLogId, + ) + .to(EventLog::Table, EventLog::EventLogId) + .on_delete(ForeignKeyAction::Restrict) + .on_update(ForeignKeyAction::Restrict) + .to_owned() +} + +fn primary_key() -> sea_query::IndexCreateStatement { + Index::create() + .name("PDX_TransactionProcessed") + .col(TransactionProcessed::TransactionHash) + .col(TransactionProcessed::TransactionTypeId) + .primary() + .to_owned() +} + +pub fn create_insert_stmt( + transaction_type: u8, + transaction_hash: String, + raw: String, + event_log_id: u64, +) -> SqResult { + Query::insert() + .into_table(TransactionProcessed::Table) + .columns([ + TransactionProcessed::TransactionTypeId, + TransactionProcessed::TransactionHash, + TransactionProcessed::Raw, + TransactionProcessed::EventLogId, + ]) + .values(vec![ + transaction_type.into(), + transaction_hash.into(), + raw.into(), + event_log_id.into(), + ]) + .map(|stmt| stmt.to_owned()) +} + +pub fn create_get_by_hash_stmt(transaction_type: u8, transaction_hash: String) -> SelectStatement { + Query::select() + .column(TransactionProcessed::Raw) + .from(TransactionProcessed::Table) + .and_where(Expr::col(TransactionProcessed::TransactionTypeId).eq(transaction_type)) + 
.and_where(Expr::col(TransactionProcessed::TransactionHash).eq(transaction_hash)) + .to_owned() +} diff --git a/event_sidecar/src/sql/tables/transaction_type.rs b/event_sidecar/src/sql/tables/transaction_type.rs new file mode 100644 index 00000000..e5c7ef23 --- /dev/null +++ b/event_sidecar/src/sql/tables/transaction_type.rs @@ -0,0 +1,61 @@ +use sea_query::{ + error::Result as SqResult, ColumnDef, Iden, InsertStatement, OnConflict, Query, Table, + TableCreateStatement, +}; + +#[derive(Clone)] +pub enum TransactionTypeId { + Deploy = 0, + Version1 = 1, +} + +#[allow(clippy::enum_variant_names)] +#[derive(Iden)] +pub(super) enum TransactionType { + #[iden = "TransactionType"] + Table, + TransactionTypeId, + TransactionTypeName, +} + +pub fn create_table_stmt() -> TableCreateStatement { + Table::create() + .table(TransactionType::Table) + .if_not_exists() + .col( + ColumnDef::new(TransactionType::TransactionTypeId) + .integer() + .not_null() + .primary_key(), + ) + .col( + ColumnDef::new(TransactionType::TransactionTypeName) + .string() + .not_null() + .unique_key(), + ) + .to_owned() +} + +pub fn create_initialise_stmt() -> SqResult { + Ok(Query::insert() + .into_table(TransactionType::Table) + .columns([ + TransactionType::TransactionTypeId, + TransactionType::TransactionTypeName, + ]) + .values(vec![ + (TransactionTypeId::Deploy as u8).into(), + "Deploy".into(), + ])? + .values(vec![ + (TransactionTypeId::Version1 as u8).into(), + "Version1".into(), + ])? + .on_conflict( + OnConflict::column(TransactionType::TransactionTypeId) + .do_nothing() + .to_owned(), + ) + .to_owned()) +} diff --git a/event_sidecar/src/testing/fake_database.rs b/event_sidecar/src/testing/fake_database.rs index 8315b213..b3c951ed 100644 --- a/event_sidecar/src/testing/fake_database.rs +++ b/event_sidecar/src/testing/fake_database.rs @@ -7,12 +7,13 @@ use casper_types::testing::TestRng; use casper_types::AsymmetricType; use rand::Rng; -use casper_event_types::FinalitySignature as FinSig; +use casper_types::FinalitySignature as FinSig; +use crate::types::database::TransactionTypeId; use crate::types::{ database::{ - DatabaseReadError, DatabaseReader, DatabaseWriteError, DatabaseWriter, DeployAggregate, - Migration, + DatabaseReadError, DatabaseReader, DatabaseWriteError, DatabaseWriter, Migration, + TransactionAggregate, }, sse_events::*, }; @@ -29,15 +30,17 @@ impl FakeDatabase { } } + /// Creates random SSE event data and saves them, returning the identifiers for each record. 
+ #[allow(clippy::too_many_lines)] pub(crate) async fn populate_with_events( &self, ) -> Result { let mut rng = TestRng::new(); let block_added = BlockAdded::random(&mut rng); - let deploy_accepted = DeployAccepted::random(&mut rng); - let deploy_processed = DeployProcessed::random(&mut rng, None); - let deploy_expired = DeployExpired::random(&mut rng, None); + let transaction_accepted = TransactionAccepted::random(&mut rng); + let transaction_processed = TransactionProcessed::random(&mut rng, None); + let transaction_expired = TransactionExpired::random(&mut rng, None); let fault = Fault::random(&mut rng); let finality_signature = FinalitySignature::random(&mut rng); let step = Step::random(&mut rng); @@ -45,9 +48,18 @@ impl FakeDatabase { let test_stored_keys = IdentifiersForStoredEvents { block_added_hash: block_added.hex_encoded_hash(), block_added_height: block_added.get_height(), - deploy_accepted_hash: deploy_accepted.hex_encoded_hash(), - deploy_processed_hash: deploy_processed.hex_encoded_hash(), - deploy_expired_hash: deploy_expired.hex_encoded_hash(), + transaction_accepted_info: ( + transaction_accepted.hex_encoded_hash(), + transaction_accepted.api_transaction_type_id(), + ), + transaction_processed_info: ( + transaction_processed.hex_encoded_hash(), + transaction_processed.api_transaction_type_id(), + ), + transaction_expired_info: ( + transaction_expired.hex_encoded_hash(), + transaction_expired.api_transaction_type_id(), + ), fault_era_id: fault.era_id.value(), fault_public_key: fault.public_key.to_hex(), finality_signatures_block_hash: finality_signature.hex_encoded_block_hash(), @@ -56,11 +68,11 @@ impl FakeDatabase { self.save_block_added_with_event_log_data(block_added, &mut rng) .await?; - self.save_deploy_accepted_with_event_log_data(deploy_accepted, &mut rng) + self.save_transaction_accepted_with_event_log_data(transaction_accepted, &mut rng) .await?; - self.save_deploy_processed_with_event_log_data(deploy_processed, &mut rng) + self.save_transaction_processed_with_event_log_data(transaction_processed, &mut rng) .await?; - self.save_deploy_expired_with_event_log_data(deploy_expired, &mut rng) + self.save_transaction_expired_with_event_log_data(transaction_expired, &mut rng) .await?; self.save_fault_with_event_log_data(fault, &mut rng).await?; self.save_finality_signature_with_event_log_data(finality_signature, &mut rng) @@ -115,13 +127,13 @@ impl FakeDatabase { Ok(()) } - async fn save_deploy_expired_with_event_log_data( + async fn save_transaction_expired_with_event_log_data( &self, - deploy_expired: DeployExpired, + transaction_expired: TransactionExpired, rng: &mut TestRng, ) -> Result<(), DatabaseWriteError> { - self.save_deploy_expired( - deploy_expired, + self.save_transaction_expired( + transaction_expired, rng.gen(), "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -130,13 +142,13 @@ impl FakeDatabase { Ok(()) } - async fn save_deploy_processed_with_event_log_data( + async fn save_transaction_processed_with_event_log_data( &self, - deploy_processed: DeployProcessed, + transaction_processed: TransactionProcessed, rng: &mut TestRng, ) -> Result<(), DatabaseWriteError> { - self.save_deploy_processed( - deploy_processed, + self.save_transaction_processed( + transaction_processed, rng.gen(), "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -145,13 +157,13 @@ impl FakeDatabase { Ok(()) } - async fn save_deploy_accepted_with_event_log_data( + async fn save_transaction_accepted_with_event_log_data( &self, - deploy_accepted: DeployAccepted, + transaction_accepted: 
TransactionAccepted, rng: &mut TestRng, ) -> Result<(), DatabaseWriteError> { - self.save_deploy_accepted( - deploy_accepted, + self.save_transaction_accepted( + transaction_accepted, rng.gen(), "127.0.0.1".to_string(), "1.1.1".to_string(), @@ -203,20 +215,20 @@ impl DatabaseWriter for FakeDatabase { } #[allow(unused)] - async fn save_deploy_accepted( + async fn save_transaction_accepted( &self, - deploy_accepted: DeployAccepted, + transaction_accepted: TransactionAccepted, event_id: u32, event_source_address: String, api_version: String, ) -> Result { let mut data = self.data.lock().expect("Error acquiring lock on data"); - let hash = deploy_accepted.hex_encoded_hash(); - // This is suffixed to allow storage of each deploy state event without overwriting. + let hash = transaction_accepted.hex_encoded_hash(); + // This is suffixed to allow storage of each transaction state event without overwriting. let identifier = format!("{}-accepted", hash); let stringified_event = - serde_json::to_string(&deploy_accepted).expect("Error serialising event data"); + serde_json::to_string(&transaction_accepted).expect("Error serialising event data"); data.insert(identifier, stringified_event); @@ -224,20 +236,20 @@ impl DatabaseWriter for FakeDatabase { } #[allow(unused)] - async fn save_deploy_processed( + async fn save_transaction_processed( &self, - deploy_processed: DeployProcessed, + transaction_processed: TransactionProcessed, event_id: u32, event_source_address: String, api_version: String, ) -> Result { let mut data = self.data.lock().expect("Error acquiring lock on data"); - let hash = deploy_processed.hex_encoded_hash(); - // This is suffixed to allow storage of each deploy state event without overwriting. + let hash = transaction_processed.hex_encoded_hash(); + // This is suffixed to allow storage of each transaction state event without overwriting. let identifier = format!("{}-processed", hash); let stringified_event = - serde_json::to_string(&deploy_processed).expect("Error serialising event data"); + serde_json::to_string(&transaction_processed).expect("Error serialising event data"); data.insert(identifier, stringified_event); @@ -245,20 +257,20 @@ impl DatabaseWriter for FakeDatabase { } #[allow(unused)] - async fn save_deploy_expired( + async fn save_transaction_expired( &self, - deploy_expired: DeployExpired, + transaction_expired: TransactionExpired, event_id: u32, event_source_address: String, api_version: String, ) -> Result { let mut data = self.data.lock().expect("Error acquiring lock on data"); - let hash = deploy_expired.hex_encoded_hash(); - // This is suffixed to allow storage of each deploy state event without overwriting. + let hash = transaction_expired.hex_encoded_hash(); + // This is suffixed to allow storage of each transaction state event without overwriting. 
let identifier = format!("{}-expired", hash); let stringified_event = - serde_json::to_string(&deploy_expired).expect("Error serialising event data"); + serde_json::to_string(&transaction_expired).expect("Error serialising event data"); data.insert(identifier, stringified_event); @@ -382,10 +394,11 @@ impl DatabaseReader for FakeDatabase { }; } - async fn get_deploy_aggregate_by_hash( + async fn get_transaction_aggregate_by_identifier( &self, + _transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let data = self.data.lock().expect("Error acquiring lock on data"); let accepted_key = format!("{}-accepted", hash); @@ -393,39 +406,39 @@ impl DatabaseReader for FakeDatabase { let expired_key = format!("{}-expired", hash); return if let Some(accepted) = data.get(&accepted_key) { - let deploy_accepted = serde_json::from_str::(accepted) + let transaction_accepted = serde_json::from_str::(accepted) .map_err(DatabaseReadError::Serialisation)?; if let Some(processed) = data.get(&processed_key) { - let deploy_processed = serde_json::from_str::(processed) + let transaction_processed = serde_json::from_str::(processed) .map_err(DatabaseReadError::Serialisation)?; - Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: Some(deploy_processed), - deploy_expired: false, + Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: Some(transaction_processed), + transaction_expired: false, }) } else if data.get(&expired_key).is_some() { - let deploy_expired = match data.get(&expired_key) { + let transaction_expired = match data.get(&expired_key) { None => None, Some(raw) => Some( - serde_json::from_str::(raw) + serde_json::from_str::(raw) .map_err(DatabaseReadError::Serialisation)?, ), }; - Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: None, - deploy_expired: deploy_expired.is_some(), + Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: None, + transaction_expired: transaction_expired.is_some(), }) } else { - Ok(DeployAggregate { - deploy_hash: hash.to_string(), - deploy_accepted: Some(deploy_accepted), - deploy_processed: None, - deploy_expired: false, + Ok(TransactionAggregate { + transaction_hash: hash.to_string(), + transaction_accepted: Some(transaction_accepted), + transaction_processed: None, + transaction_expired: false, }) } } else { @@ -433,46 +446,52 @@ impl DatabaseReader for FakeDatabase { }; } - async fn get_deploy_accepted_by_hash( + async fn get_transaction_accepted_by_hash( &self, + _transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let identifier = format!("{}-accepted", hash); let data = self.data.lock().expect("Error acquiring lock on data"); return if let Some(event) = data.get(&identifier) { - serde_json::from_str::(event).map_err(DatabaseReadError::Serialisation) + serde_json::from_str::(event) + .map_err(DatabaseReadError::Serialisation) } else { Err(DatabaseReadError::NotFound) }; } - async fn get_deploy_processed_by_hash( + async fn get_transaction_processed_by_hash( &self, + _transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let identifier = format!("{}-processed", hash); let data = self.data.lock().expect("Error acquiring lock on data"); return if let Some(event) = data.get(&identifier) { - 
serde_json::from_str::(event).map_err(DatabaseReadError::Serialisation) + serde_json::from_str::(event) + .map_err(DatabaseReadError::Serialisation) } else { Err(DatabaseReadError::NotFound) }; } - async fn get_deploy_expired_by_hash( + async fn get_transaction_expired_by_hash( &self, + _transaction_type: &TransactionTypeId, hash: &str, - ) -> Result { + ) -> Result { let identifier = format!("{}-expired", hash); let data = self.data.lock().expect("Error acquiring lock on data"); return if let Some(event) = data.get(&identifier) { - serde_json::from_str::(event).map_err(DatabaseReadError::Serialisation) + serde_json::from_str::(event) + .map_err(DatabaseReadError::Serialisation) } else { Err(DatabaseReadError::NotFound) }; @@ -542,9 +561,9 @@ impl DatabaseReader for FakeDatabase { pub struct IdentifiersForStoredEvents { pub block_added_hash: String, pub block_added_height: u64, - pub deploy_accepted_hash: String, - pub deploy_processed_hash: String, - pub deploy_expired_hash: String, + pub transaction_accepted_info: (String, TransactionTypeId), + pub transaction_processed_info: (String, TransactionTypeId), + pub transaction_expired_info: (String, TransactionTypeId), pub fault_public_key: String, pub fault_era_id: u64, pub finality_signatures_block_hash: String, diff --git a/event_sidecar/src/testing/fake_event_stream.rs b/event_sidecar/src/testing/fake_event_stream.rs index 8cf96063..998fe0c8 100644 --- a/event_sidecar/src/testing/fake_event_stream.rs +++ b/event_sidecar/src/testing/fake_event_stream.rs @@ -20,17 +20,17 @@ use crate::{ utils::tests::display_duration, }; use casper_event_types::{sse_data::SseData, Filter as SseFilter}; -use casper_types::{testing::TestRng, ProtocolVersion}; +use casper_types::{testing::TestRng, ProtocolVersion, Transaction}; use warp::{path::end, Filter}; const TIME_BETWEEN_BLOCKS: Duration = Duration::from_secs(30); const BLOCKS_IN_ERA: u64 = 4; const NUMBER_OF_VALIDATORS: u16 = 100; -const NUMBER_OF_DEPLOYS_PER_BLOCK: u16 = 20; +const NUMBER_OF_TRANSACTIONS_PER_BLOCK: u16 = 20; const API_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 5, 2); type FrequencyOfStepEvents = u8; -type NumberOfDeployEventsInBurst = u64; +type NumberOftransactionEventsInBurst = u64; #[derive(Clone)] pub enum Bound { @@ -61,7 +61,7 @@ pub struct Restart { pub enum Scenario { Realistic(GenericScenarioSettings), LoadTestingStep(GenericScenarioSettings, FrequencyOfStepEvents), - LoadTestingDeploy(GenericScenarioSettings, NumberOfDeployEventsInBurst), + LoadTestingTransaction(GenericScenarioSettings, NumberOftransactionEventsInBurst), Spam(Bound), } @@ -72,8 +72,8 @@ impl Display for Scenario { Scenario::LoadTestingStep(_, _) => { write!(f, "Load Testing [Step]") } - Scenario::LoadTestingDeploy(_, _) => { - write!(f, "Load Testing [Deploy]") + Scenario::LoadTestingTransaction(_, _) => { + write!(f, "Load Testing [transaction]") } Scenario::Spam(_) => { write!(f, "Spam") @@ -112,8 +112,8 @@ async fn execute_scenario( ) .await } - Scenario::LoadTestingDeploy(settings, num_in_burst) => { - do_load_testing_deploy( + Scenario::LoadTestingTransaction(settings, num_in_burst) => { + do_load_testing_transaction( test_rng, events_sender, events_receiver, @@ -187,13 +187,13 @@ async fn do_spam_testing( bound: Bound, ) -> TestRng { let scenario_task = tokio::spawn(async move { - spam_deploy(&mut test_rng, events_sender.clone(), bound).await; + spam_transaction(&mut test_rng, events_sender.clone(), bound).await; test_rng }); let broadcasting_task = tokio::spawn(async move { while let 
Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Main), None); + event_stream_server.broadcast(event, Some(SseFilter::Events), None); } }); @@ -201,7 +201,7 @@ async fn do_spam_testing( test_rng.expect("Should have returned TestRng for re-use") } -async fn do_load_testing_deploy( +async fn do_load_testing_transaction( mut test_rng: TestRng, events_sender: Sender, mut events_receiver: Receiver, @@ -210,7 +210,7 @@ async fn do_load_testing_deploy( num_in_burst: u64, ) -> TestRng { let scenario_task = tokio::spawn(async move { - load_testing_deploy( + load_testing_transaction( &mut test_rng, events_sender.clone(), settings.initial_phase, @@ -226,17 +226,17 @@ async fn do_load_testing_deploy( events_sender .send(SseData::Shutdown) .await - .expect("Scenario::LoadTestingDeploy failed sending shutdown message!"); + .expect("Scenario::LoadTestingtransaction failed sending shutdown message!"); tokio::time::sleep(delay_before_restart).await; - load_testing_deploy(&mut test_rng, events_sender, final_phase, num_in_burst).await; + load_testing_transaction(&mut test_rng, events_sender, final_phase, num_in_burst).await; } test_rng }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Main), None); + event_stream_server.broadcast(event, Some(SseFilter::Events), None); } }); @@ -278,7 +278,7 @@ async fn do_load_testing_step( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Main), None); + event_stream_server.broadcast(event, Some(SseFilter::Events), None); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); @@ -313,7 +313,7 @@ async fn handle_realistic_scenario( }); let broadcasting_task = tokio::spawn(async move { while let Some(event) = events_receiver.recv().await { - event_stream_server.broadcast(event, Some(SseFilter::Main), None); + event_stream_server.broadcast(event, Some(SseFilter::Events), None); } }); let (test_rng, _) = tokio::join!(scenario_task, broadcasting_task); @@ -349,7 +349,7 @@ async fn realistic_event_streaming( type RealisticScenarioData = ( Vec, - Vec<(SseData, casper_event_types::Deploy)>, + Vec<(SseData, Transaction)>, Vec, Vec, Vec, @@ -360,22 +360,25 @@ type RealisticScenarioData = ( fn prepare_data(test_rng: &mut TestRng, loops_in_duration: u64) -> RealisticScenarioData { let finality_signatures_per_loop = NUMBER_OF_VALIDATORS as u64; let total_finality_signature_events = finality_signatures_per_loop * loops_in_duration; - let deploy_events_per_loop = NUMBER_OF_DEPLOYS_PER_BLOCK as u64; - let total_deploy_events = deploy_events_per_loop * loops_in_duration; + let transaction_events_per_loop = NUMBER_OF_TRANSACTIONS_PER_BLOCK as u64; + let total_transaction_events = transaction_events_per_loop * loops_in_duration; let total_block_added_events = loops_in_duration; let total_step_events = loops_in_duration / BLOCKS_IN_ERA; let block_added_events = iter::repeat_with(|| SseData::random_block_added(test_rng)) .take(plus_twenty_percent(total_block_added_events) as usize) .collect_vec(); - let deploy_accepted_events = iter::repeat_with(|| SseData::random_deploy_accepted(test_rng)) - .take(plus_twenty_percent(total_deploy_events) as usize) - .collect_vec(); - let deploy_expired_events = iter::repeat_with(|| SseData::random_deploy_expired(test_rng)) - .take((loops_in_duration / 2 + 1) as usize) 
- .collect_vec(); - let deploy_processed_events = iter::repeat_with(|| SseData::random_deploy_processed(test_rng)) - .take(plus_twenty_percent(total_deploy_events) as usize) - .collect_vec(); + let transaction_accepted_events = + iter::repeat_with(|| SseData::random_transaction_accepted(test_rng)) + .take(plus_twenty_percent(total_transaction_events) as usize) + .collect_vec(); + let transaction_expired_events = + iter::repeat_with(|| SseData::random_transaction_expired(test_rng)) + .take((loops_in_duration / 2 + 1) as usize) + .collect_vec(); + let transaction_processed_events = + iter::repeat_with(|| SseData::random_transaction_processed(test_rng)) + .take(plus_twenty_percent(total_transaction_events) as usize) + .collect_vec(); let fault_events = iter::repeat_with(|| SseData::random_fault(test_rng)) .take((loops_in_duration / 2 + 1) as usize) .collect_vec(); @@ -388,9 +391,9 @@ fn prepare_data(test_rng: &mut TestRng, loops_in_duration: u64) -> RealisticScen .collect_vec(); ( block_added_events, - deploy_accepted_events, - deploy_expired_events, - deploy_processed_events, + transaction_accepted_events, + transaction_expired_events, + transaction_processed_events, fault_events, finality_signature_events, step_events, @@ -407,9 +410,9 @@ async fn do_stream( ) { let ( mut block_added_events, - mut deploy_accepted_events, - mut deploy_expired_events, - mut deploy_processed_events, + mut transaction_accepted_events, + mut transaction_expired_events, + mut transaction_processed_events, mut fault_events, mut finality_signature_events, mut step_events, @@ -428,17 +431,17 @@ async fn do_stream( emit_events( &events_sender, &mut finality_signature_events, - &mut deploy_processed_events, + &mut transaction_processed_events, &mut block_added_events, - &mut deploy_accepted_events, + &mut transaction_accepted_events, ) .await; } if era_counter % 2 == 0 { events_sender - .send(deploy_expired_events.pop().unwrap()) + .send(transaction_expired_events.pop().unwrap()) .await - .expect("Failed sending deploy_expired_event"); + .expect("Failed sending transaction_expired_event"); } else { events_sender .send(fault_events.pop().unwrap()) @@ -453,14 +456,14 @@ async fn do_stream( async fn emit_events( events_sender: &Sender, finality_signature_events: &mut Vec, - deploy_processed_events: &mut Vec, + transaction_processed_events: &mut Vec, block_added_events: &mut Vec, - deploy_accepted_events: &mut Vec<(SseData, casper_event_types::Deploy)>, + transaction_accepted_events: &mut Vec<(SseData, casper_types::Transaction)>, ) { emit_sig_events(events_sender, finality_signature_events).await; - emit_deploy_processed_events(events_sender, deploy_processed_events).await; + emit_transaction_processed_events(events_sender, transaction_processed_events).await; emit_block_added_events(events_sender, block_added_events).await; - emit_deploy_accepted_events(events_sender, deploy_accepted_events).await; + emit_transaction_accepted_events(events_sender, transaction_accepted_events).await; } async fn emit_block_added_events( @@ -473,15 +476,15 @@ async fn emit_block_added_events( .expect("Failed sending block_added_event"); } -async fn emit_deploy_accepted_events( +async fn emit_transaction_accepted_events( events_sender: &Sender, - deploy_accepted_events: &mut Vec<(SseData, casper_event_types::Deploy)>, + transaction_accepted_events: &mut Vec<(SseData, casper_types::Transaction)>, ) { - for _ in 0..NUMBER_OF_DEPLOYS_PER_BLOCK { + for _ in 0..NUMBER_OF_TRANSACTIONS_PER_BLOCK { events_sender - 
.send(deploy_accepted_events.pop().unwrap().0) + .send(transaction_accepted_events.pop().unwrap().0) .await - .expect("Failed sending deploy_accepted_event"); + .expect("Failed sending transaction_accepted_event"); } } @@ -492,15 +495,15 @@ async fn emit_step(events_sender: &Sender, step_events: &mut Vec, - deploy_processed_events: &mut Vec, + transaction_processed_events: &mut Vec, ) { - for _ in 0..NUMBER_OF_DEPLOYS_PER_BLOCK { + for _ in 0..NUMBER_OF_TRANSACTIONS_PER_BLOCK { events_sender - .send(deploy_processed_events.pop().unwrap()) + .send(transaction_processed_events.pop().unwrap()) .await - .expect("Failed sending deploy_processed_events"); + .expect("Failed sending transaction_processed_events"); } } @@ -540,7 +543,7 @@ async fn load_testing_step( } } -async fn spam_deploy(test_rng: &mut TestRng, events_sender: Sender, bound: Bound) { +async fn spam_transaction(test_rng: &mut TestRng, events_sender: Sender, bound: Bound) { let start_time = Instant::now(); events_sender .send(SseData::ApiVersion(API_VERSION)) @@ -551,16 +554,16 @@ async fn spam_deploy(test_rng: &mut TestRng, events_sender: Sender, bou while start_time.elapsed() < duration { for _ in 0..100 { events_sender - .send(SseData::random_deploy_accepted(test_rng).0) + .send(SseData::random_transaction_accepted(test_rng).0) .await - .expect("failed sending random_deploy_accepted"); + .expect("failed sending random_transaction_accepted"); } } } } } -async fn load_testing_deploy( +async fn load_testing_transaction( test_rng: &mut TestRng, events_sender: Sender, bound: Bound, @@ -577,16 +580,16 @@ async fn load_testing_deploy( while start_time.elapsed() < duration { for _ in 0..burst_size { events_sender - .send(SseData::random_deploy_accepted(test_rng).0) + .send(SseData::random_transaction_accepted(test_rng).0) .await - .expect("failed sending random_deploy_accepted"); + .expect("failed sending random_transaction_accepted"); } tokio::time::sleep(Duration::from_millis(500)).await; for _ in 0..burst_size { events_sender - .send(SseData::random_deploy_processed(test_rng)) + .send(SseData::random_transaction_processed(test_rng)) .await - .expect("failed sending random_deploy_processed"); + .expect("failed sending random_transaction_processed"); } } } diff --git a/event_sidecar/src/testing/raw_sse_events_utils.rs b/event_sidecar/src/testing/raw_sse_events_utils.rs index 7d07cd6f..f1460b1e 100644 --- a/event_sidecar/src/testing/raw_sse_events_utils.rs +++ b/event_sidecar/src/testing/raw_sse_events_utils.rs @@ -15,7 +15,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"1.5.3\"}".to_string()), ( Some("0".to_string()), - example_block_added_1_5_2(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, "3"), ), ] } @@ -26,7 +26,7 @@ pub(crate) mod tests { (Some("0".to_string()), shutdown()), ( Some("1".to_string()), - example_block_added_1_5_2(BLOCK_HASH_1, "1"), + example_block_added_2_0_0(BLOCK_HASH_1, "1"), ), ] } @@ -50,7 +50,7 @@ pub(crate) mod tests { (None, format!("{{\"ApiVersion\":\"{version}\"}}")), ( Some("1".to_string()), - example_block_added_1_5_2(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_2, "2"), ), ] } @@ -60,7 +60,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"1.5.2\"}".to_string()), ( Some("1".to_string()), - example_block_added_1_5_2(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_2, "2"), ), ] } @@ -70,7 +70,7 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"1.5.2\"}".to_string()), ( Some("3".to_string()), - example_block_added_1_5_2(BLOCK_HASH_3, "3"), + 
example_block_added_2_0_0(BLOCK_HASH_3, "3"), ), ] } @@ -80,11 +80,11 @@ pub(crate) mod tests { (None, "{\"ApiVersion\":\"1.5.2\"}".to_string()), ( Some("1".to_string()), - example_block_added_1_5_2(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, "3"), ), ( Some("1".to_string()), - example_block_added_1_5_2(BLOCK_HASH_4, "4"), + example_block_added_2_0_0(BLOCK_HASH_4, "4"), ), ] } @@ -125,7 +125,7 @@ pub(crate) mod tests { if let SseData::BlockAdded { block_hash, .. } = block_added { let encoded_hash = HexFmt(block_hash.inner()).to_string(); let block_added_raw = - example_block_added_1_5_2(encoded_hash.as_str(), index.as_str()); + example_block_added_2_0_0(encoded_hash.as_str(), index.as_str()); blocks_added.push((Some(index), block_added_raw)); } else { panic!("random_block_added didn't return SseData::BlockAdded"); diff --git a/event_sidecar/src/testing/shared.rs b/event_sidecar/src/testing/shared.rs index cf535ebd..e25ce367 100644 --- a/event_sidecar/src/testing/shared.rs +++ b/event_sidecar/src/testing/shared.rs @@ -6,9 +6,9 @@ pub(crate) enum EventType { ApiVersion, SidecarVersion, BlockAdded, - DeployAccepted, - DeployExpired, - DeployProcessed, + TransactionAccepted, + TransactionExpired, + TransactionProcessed, Fault, FinalitySignature, Step, @@ -21,9 +21,9 @@ impl From for EventType { SseData::ApiVersion(_) => EventType::ApiVersion, SseData::SidecarVersion(_) => EventType::SidecarVersion, SseData::BlockAdded { .. } => EventType::BlockAdded, - SseData::DeployAccepted { .. } => EventType::DeployAccepted, - SseData::DeployProcessed { .. } => EventType::DeployProcessed, - SseData::DeployExpired { .. } => EventType::DeployExpired, + SseData::TransactionAccepted { .. } => EventType::TransactionAccepted, + SseData::TransactionProcessed { .. } => EventType::TransactionProcessed, + SseData::TransactionExpired { .. } => EventType::TransactionExpired, SseData::Fault { .. } => EventType::Fault, SseData::FinalitySignature(_) => EventType::FinalitySignature, SseData::Step { .. 
} => EventType::Step, @@ -38,9 +38,9 @@ impl Display for EventType { EventType::ApiVersion => "ApiVersion", EventType::SidecarVersion => "SidecarVersion", EventType::BlockAdded => "BlockAdded", - EventType::DeployAccepted => "DeployAccepted", - EventType::DeployExpired => "DeployExpired", - EventType::DeployProcessed => "DeployProcessed", + EventType::TransactionAccepted => "TransactionAccepted", + EventType::TransactionExpired => "TransactionExpired", + EventType::TransactionProcessed => "TransactionProcessed", EventType::Fault => "Fault", EventType::FinalitySignature => "FinalitySignature", EventType::Step => "Step", diff --git a/event_sidecar/src/tests/integration_tests.rs b/event_sidecar/src/tests/integration_tests.rs index da0e5e1e..24cb729d 100644 --- a/event_sidecar/src/tests/integration_tests.rs +++ b/event_sidecar/src/tests/integration_tests.rs @@ -1,5 +1,5 @@ use bytes::Bytes; -use casper_event_types::sse_data::{test_support::*, SseData}; +use casper_event_types::sse_data::test_support::*; use casper_types::testing::TestRng; use core::time; use eventsource_stream::{Event, EventStream, Eventsource}; @@ -20,7 +20,6 @@ use crate::{ sse_server_example_1_5_2_data_second, sse_server_example_1_5_2_data_third, sse_server_shutdown_1_5_2_data, EventsWithIds, }, - shared::EventType, testing_config::{prepare_config, TestingConfig}, }, types::{ @@ -70,7 +69,7 @@ async fn given_sidecar_when_only_node_shuts_down_then_shut_down() { event_stream_server_port, ) = build_test_config(); - //MockNode::new should only have /events/main and /events sse endpoints, + //MockNode::new should only have /events and /events sse endpoints, // simulating a situation when a node doesn't expose all endpoints. let mut node_mock = MockNodeBuilder::build_example_1_5_2_node( node_port_for_sse_connection, @@ -79,7 +78,7 @@ async fn given_sidecar_when_only_node_shuts_down_then_shut_down() { start_nodes_and_wait(vec![&mut node_mock]).await; let sidecar_join = start_sidecar(testing_config).await; let (_, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(1, receiver, Duration::from_secs(30)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -110,7 +109,7 @@ async fn should_allow_client_connection_to_sse() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(1, receiver, Duration::from_secs(30)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -139,7 +138,7 @@ async fn should_respond_to_rest_query() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar_with_rest_api(testing_config).await; let (_, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(1, receiver, Duration::from_secs(30)).await; let block_request_url = format!("http://127.0.0.1:{}/block", sidecar_rest_server_port); @@ -175,7 +174,7 @@ async fn should_allow_partial_connection_on_one_filter() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", 
event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(1, receiver, Duration::from_secs(30)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -183,19 +182,6 @@ async fn should_allow_partial_connection_on_one_filter() { assert!(!events_received.is_empty()); } -#[tokio::test(flavor = "multi_thread", worker_threads = 4)] -async fn should_allow_partial_connection_on_two_filters() { - let received_event_types = partial_connection_test(true).await; - assert_eq!(received_event_types.len(), 1) -} - -#[tokio::test(flavor = "multi_thread", worker_threads = 4)] -async fn should_disallow_partial_connection_on_one_filter() { - let received_event_types = partial_connection_test(false).await; - //There should only be ApiVersion - assert!(received_event_types.is_empty()) -} - #[tokio::test(flavor = "multi_thread", worker_threads = 10)] async fn should_fail_to_reconnect() { let test_rng = TestRng::new(); @@ -218,7 +204,7 @@ async fn should_fail_to_reconnect() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(31, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -265,7 +251,7 @@ async fn should_reconnect() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; let receiver = wait_for_n_messages(31, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -308,7 +294,7 @@ async fn shutdown_should_be_passed_through() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(2, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -339,7 +325,7 @@ async fn connecting_to_node_prior_to_1_5_2_should_fail() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, _) = fetch_data_from_endpoint_with_panic_flag( - "/events/main?start_from=0", + "/events?start_from=0", event_stream_server_port, false, ) @@ -371,7 +357,7 @@ async fn shutdown_should_be_passed_through_when_versions_change() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; let receiver = wait_for_n_messages(3, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; let mut node_mock = MockNodeBuilder::build_example_node_with_version( @@ -432,7 +418,7 @@ async fn sidecar_should_use_start_from_if_database_is_empty() { ) = build_test_config(); let data_of_node = vec![( Some("2".to_string()), - example_block_added_1_5_2(BLOCK_HASH_3, "3"), + example_block_added_2_0_0(BLOCK_HASH_3, "3"), 
)]; let mut node_mock = MockNodeBuilder { version: "1.5.2".to_string(), @@ -445,7 +431,7 @@ async fn sidecar_should_use_start_from_if_database_is_empty() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(2, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; let events_received = tokio::join!(join_handle).0.unwrap(); @@ -489,7 +475,7 @@ async fn sidecar_should_use_start_from_if_database_is_not_empty() { start_nodes_and_wait(vec![&mut node_mock]).await; start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(1, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; @@ -519,7 +505,7 @@ async fn sidecar_should_connect_to_multiple_nodes() { ]); start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(4, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut mock_node_1, &mut mock_node_2, &mut mock_node_3]).await; @@ -555,7 +541,7 @@ async fn sidecar_should_not_downgrade_api_version_when_new_nodes_disconnect() { ]); start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; let receiver = wait_for_n_messages(2, receiver, Duration::from_secs(120)).await; mock_node_1.stop().await; mock_node_2.start().await; @@ -589,7 +575,7 @@ async fn sidecar_should_report_only_one_api_version_if_there_was_no_update() { ]); start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(3, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut mock_node_1, &mut mock_node_2]).await; let events_received = tokio::join!(join_handle).0.unwrap(); @@ -621,7 +607,7 @@ async fn sidecar_should_connect_to_multiple_nodes_even_if_some_of_them_dont_resp ]); start_sidecar(testing_config).await; let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; + fetch_data_from_endpoint("/events?start_from=0", event_stream_server_port).await; wait_for_n_messages(3, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut mock_node_1, &mut mock_node_2]).await; @@ -639,34 +625,6 @@ async fn sidecar_should_connect_to_multiple_nodes_even_if_some_of_them_dont_resp )); } -async fn partial_connection_test(allow_partial_connection: bool) -> Vec { - // Prepare the mock node, by the "default" config it should have only the /events and /events/main endpoints - let (sse_port, rest_port, mut node_mock) = build_1_5_2(sse_server_example_1_5_2_data()).await; - // Setup config for the sidecar - // - Set the sidecar to reattempt connection 
only once after a 2 second delay. - // - Allow partial based on the value passed to the function. - let (mut testing_config, event_stream_server_port, _temp_storage_dir) = - build_testing_config_based_on_ports(vec![(sse_port, rest_port)]); - testing_config.set_allow_partial_connection_for_node(sse_port, allow_partial_connection); - // Start the mock node - start_nodes_and_wait(vec![&mut node_mock]).await; - - // Run the Sidecar in another task with the prepared config. - start_sidecar(testing_config).await; - let (join_handle, receiver) = - fetch_data_from_endpoint("/events/main?start_from=0", event_stream_server_port).await; - let _ = wait_for_n_messages(1, receiver, Duration::from_secs(60)).await; - //We need to stop the mock node, otherwise `join_handle` might never finish (it listens to events until sidecar stops) - stop_nodes_and_wait(vec![&mut node_mock]).await; - let events_received = tokio::join!(join_handle).0.unwrap(); - let collected: Vec = events_received - .iter() - .map(|raw_events| serde_json::from_str::(raw_events).unwrap().into()) - .filter(|t: &EventType| *t != EventType::ApiVersion) - .collect(); - collected -} - pub async fn try_connect_to_single_stream( url: &str, ) -> Option> + Sized>> { diff --git a/event_sidecar/src/tests/integration_tests_version_switch.rs b/event_sidecar/src/tests/integration_tests_version_switch.rs index feb5bd0f..684c30b5 100644 --- a/event_sidecar/src/tests/integration_tests_version_switch.rs +++ b/event_sidecar/src/tests/integration_tests_version_switch.rs @@ -15,11 +15,9 @@ pub mod tests { async fn should_successfully_switch_api_versions() { let mut node_mock = MockNodeBuilder::build_example_node_with_version(None, None, "1.5.2"); let properties = prepare_one_node_and_start(&mut node_mock).await; - let (join_handle, receiver) = fetch_data_from_endpoint( - "/events/main?start_from=0", - properties.event_stream_server_port, - ) - .await; + let (join_handle, receiver) = + fetch_data_from_endpoint("/events?start_from=0", properties.event_stream_server_port) + .await; let receiver = wait_for_n_messages(1, receiver, Duration::from_secs(120)).await; stop_nodes_and_wait(vec![&mut node_mock]).await; diff --git a/event_sidecar/src/tests/performance_tests.rs b/event_sidecar/src/tests/performance_tests.rs index 14a18a27..35e5df2d 100644 --- a/event_sidecar/src/tests/performance_tests.rs +++ b/event_sidecar/src/tests/performance_tests.rs @@ -71,11 +71,11 @@ async fn check_latency_on_load_testing_step_scenario() { #[tokio::test(flavor = "multi_thread", worker_threads = 4)] #[ignore] -async fn check_latency_on_load_testing_deploys_scenario() { +async fn check_latency_on_load_testing_transactions_scenario() { let duration = Duration::from_secs(60); performance_check( - Scenario::LoadTestingDeploy( + Scenario::LoadTestingTransaction( GenericScenarioSettings::new(Bound::Timed(duration), None), 20, ), @@ -151,9 +151,9 @@ pub(crate) enum EventType { ApiVersion, SidecarVersion, BlockAdded, - DeployAccepted, - DeployExpired, - DeployProcessed, + TransactionAccepted, + TransactionExpired, + TransactionProcessed, Fault, FinalitySignature, Step, @@ -166,9 +166,9 @@ impl From for EventType { SseData::ApiVersion(_) => EventType::ApiVersion, SseData::SidecarVersion(_) => EventType::SidecarVersion, SseData::BlockAdded { .. } => EventType::BlockAdded, - SseData::DeployAccepted { .. } => EventType::DeployAccepted, - SseData::DeployProcessed { .. } => EventType::DeployProcessed, - SseData::DeployExpired { .. } => EventType::DeployExpired, + SseData::TransactionAccepted { .. 
} => EventType::TransactionAccepted, + SseData::TransactionProcessed { .. } => EventType::TransactionProcessed, + SseData::TransactionExpired { .. } => EventType::TransactionExpired, SseData::Fault { .. } => EventType::Fault, SseData::FinalitySignature(_) => EventType::FinalitySignature, SseData::Step { .. } => EventType::Step, @@ -183,9 +183,9 @@ impl Display for EventType { EventType::ApiVersion => "ApiVersion", EventType::SidecarVersion => "SidecarVersion", EventType::BlockAdded => "BlockAdded", - EventType::DeployAccepted => "DeployAccepted", - EventType::DeployExpired => "DeployExpired", - EventType::DeployProcessed => "DeployProcessed", + EventType::TransactionAccepted => "TransactionAccepted", + EventType::TransactionExpired => "TransactionExpired", + EventType::TransactionProcessed => "TransactionProcessed", EventType::Fault => "Fault", EventType::FinalitySignature => "FinalitySignature", EventType::Step => "Step", @@ -205,9 +205,11 @@ impl TimestampedEvent { SseData::ApiVersion(_) => "ApiVersion".to_string(), SseData::SidecarVersion(_) => "SidecarVersion".to_string(), SseData::BlockAdded { block_hash, .. } => block_hash.to_string(), - SseData::DeployAccepted { deploy } => deploy.hash().to_string(), - SseData::DeployProcessed { deploy_hash, .. } => deploy_hash.to_string(), - SseData::DeployExpired { deploy_hash } => deploy_hash.to_string(), + SseData::TransactionAccepted(transaction) => transaction.hash().to_string(), + SseData::TransactionProcessed { + transaction_hash, .. + } => transaction_hash.to_string(), + SseData::TransactionExpired { transaction_hash } => transaction_hash.to_string(), SseData::Fault { era_id, public_key, .. } => format!("{}-{}", era_id.value(), public_key.to_hex()), @@ -225,9 +227,9 @@ impl TimestampedEvent { match (&self.event, &other.event) { (SseData::ApiVersion(_), SseData::ApiVersion(_)) | (SseData::BlockAdded { .. }, SseData::BlockAdded { .. }) - | (SseData::DeployAccepted { .. }, SseData::DeployAccepted { .. }) - | (SseData::DeployProcessed { .. }, SseData::DeployProcessed { .. }) - | (SseData::DeployExpired { .. }, SseData::DeployExpired { .. }) + | (SseData::TransactionAccepted { .. }, SseData::TransactionAccepted { .. }) + | (SseData::TransactionProcessed { .. }, SseData::TransactionProcessed { .. }) + | (SseData::TransactionExpired { .. }, SseData::TransactionExpired { .. }) | (SseData::Fault { .. }, SseData::Fault { .. }) | (SseData::FinalitySignature(_), SseData::FinalitySignature(_)) | (SseData::Step { .. }, SseData::Step { .. 
}) @@ -356,11 +358,11 @@ async fn performance_check(scenario: Scenario, duration: Duration, acceptable_la let event_types_ordered_for_efficiency = vec![ EventType::FinalitySignature, - EventType::DeployAccepted, - EventType::DeployProcessed, + EventType::TransactionAccepted, + EventType::TransactionProcessed, EventType::BlockAdded, EventType::Step, - EventType::DeployExpired, + EventType::TransactionExpired, EventType::Fault, ]; @@ -431,11 +433,11 @@ async fn live_performance_check( let event_types_ordered_for_efficiency = vec![ EventType::FinalitySignature, - EventType::DeployAccepted, - EventType::DeployProcessed, + EventType::TransactionAccepted, + EventType::TransactionProcessed, EventType::BlockAdded, EventType::Step, - EventType::DeployExpired, + EventType::TransactionExpired, EventType::Fault, ]; @@ -457,9 +459,9 @@ fn check_latencies_are_acceptable( ) { let event_types = vec![ EventType::BlockAdded, - EventType::DeployAccepted, - EventType::DeployExpired, - EventType::DeployProcessed, + EventType::TransactionAccepted, + EventType::TransactionExpired, + EventType::TransactionProcessed, EventType::Fault, EventType::FinalitySignature, EventType::Step, @@ -486,9 +488,9 @@ fn create_results_from_data( ) -> Vec { let event_types_ordered_for_display = vec![ EventType::BlockAdded, - EventType::DeployAccepted, - EventType::DeployExpired, - EventType::DeployProcessed, + EventType::TransactionAccepted, + EventType::TransactionExpired, + EventType::TransactionProcessed, EventType::Fault, EventType::FinalitySignature, EventType::Step, @@ -675,8 +677,11 @@ async fn start_counting_outbound_events( cancellation_token: CancellationToken, event_stream_server_port: u16, ) -> JoinHandle { - let (_, receiver) = - fetch_data_from_endpoint("/events/deploys?start_from=0", event_stream_server_port).await; + let (_, receiver) = fetch_data_from_endpoint( + "/events/Transactions?start_from=0", + event_stream_server_port, + ) + .await; let mut receiver = wait_for_n_messages(1, receiver, Duration::from_secs(120)).await; tokio::spawn(async move { let mut counter = 0; diff --git a/event_sidecar/src/types/database.rs b/event_sidecar/src/types/database.rs index 4ba48dfb..adfb3793 100644 --- a/event_sidecar/src/types/database.rs +++ b/event_sidecar/src/types/database.rs @@ -3,19 +3,47 @@ use crate::{ postgresql_database::PostgreSqlDatabase, sqlite_database::SqliteDatabase, types::DDLConfiguration, }, - sql::tables, + sql::{tables, tables::transaction_type::TransactionTypeId as SqlTransactionTypeId}, types::sse_events::{ - BlockAdded, DeployAccepted, DeployExpired, DeployProcessed, Fault, FinalitySignature, Step, + BlockAdded, Fault, FinalitySignature, Step, TransactionAccepted, TransactionExpired, + TransactionProcessed, }, StorageConfig, }; use anyhow::{Context, Error}; use async_trait::async_trait; -use casper_event_types::FinalitySignature as FinSig; +use casper_types::FinalitySignature as FinSig; use serde::{Deserialize, Serialize}; +#[cfg(test)] +use std::fmt::{Display, Formatter}; use std::{path::Path, sync::Arc}; use utoipa::ToSchema; +pub enum TransactionTypeId { + Deploy, + Version1, +} + +impl From<&TransactionTypeId> for u8 { + fn from(transaction_type: &TransactionTypeId) -> u8 { + let sql_transaction_type = match transaction_type { + TransactionTypeId::Deploy => SqlTransactionTypeId::Deploy, + TransactionTypeId::Version1 => SqlTransactionTypeId::Version1, + }; + sql_transaction_type as u8 + } +} + +#[cfg(test)] +impl Display for TransactionTypeId { + fn fmt(&self, f: &mut Formatter<'_>) -> 
std::fmt::Result { + match self { + TransactionTypeId::Deploy => write!(f, "deploy"), + TransactionTypeId::Version1 => write!(f, "version1"), + } + } +} + #[derive(Clone)] pub enum Database { SqliteDatabaseWrapper(SqliteDatabase), @@ -69,36 +97,36 @@ pub trait DatabaseWriter { ) -> Result; /// Save a DeployAccepted event to the database. /// - /// * `deploy_accepted`: the [DeployAccepted] from the `data` field. + /// * `transaction_accepted`: the [DeployAccepted] from the `data` field. /// * `event_id`: the node-specific assigned `id`. /// * `event_source_address`: the IP address of the source node. - async fn save_deploy_accepted( + async fn save_transaction_accepted( &self, - deploy_accepted: DeployAccepted, + transaction_accepted: TransactionAccepted, event_id: u32, event_source_address: String, api_version: String, ) -> Result; /// Save a DeployProcessed event to the database. /// - /// * `deploy_accepted`: the [DeployProcessed] from the `data` field. + /// * `transaction_accepted`: the [DeployProcessed] from the `data` field. /// * `event_id`: the node-specific assigned `id`. /// * `event_source_address`: the IP address of the source node. - async fn save_deploy_processed( + async fn save_transaction_processed( &self, - deploy_processed: DeployProcessed, + transaction_processed: TransactionProcessed, event_id: u32, event_source_address: String, api_version: String, ) -> Result; /// Save a DeployExpired event to the database. /// - /// * `deploy_expired`: the [DeployExpired] from the `data` field. + /// * `transaction_expired`: the [DeployExpired] from the `data` field. /// * `event_id`: the node-specific assigned `id`. /// * `event_source_address`: the IP address of the source node. - async fn save_deploy_expired( + async fn save_transaction_expired( &self, - deploy_expired: DeployExpired, + transaction_expired: TransactionExpired, event_id: u32, event_source_address: String, api_version: String, @@ -211,7 +239,7 @@ impl From for DatabaseWriteError { } "1555" | "2067" => { // The message looks something like this: - // UNIQUE constraint failed: DeployProcessed.deploy_hash + // UNIQUE constraint failed: DeployProcessed.transaction_hash let table = db_err.message().split(':').collect::>()[1] .split('.') @@ -252,35 +280,39 @@ pub trait DatabaseReader { /// /// * `hash` - hash which identifies the block async fn get_block_by_hash(&self, hash: &str) -> Result; - /// Returns an aggregate of the deploy's events corresponding to the given hex-encoded `hash` + /// Returns an aggregate of the transaction's events corresponding to the given hex-encoded `hash` /// - /// * `hash` - deploy hash of which the aggregate data should be fetched - async fn get_deploy_aggregate_by_hash( + /// * `hash` - transaction hash of which the aggregate data should be fetched + async fn get_transaction_aggregate_by_identifier( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result; + ) -> Result; /// Returns the [DeployAccepted] corresponding to the given hex-encoded `hash` /// - /// * `hash` - deploy hash which identifies the deploy accepted - async fn get_deploy_accepted_by_hash( + /// * `hash` - transaction hash which identifies the transaction accepted + async fn get_transaction_accepted_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result; + ) -> Result; /// Returns the [DeployProcessed] corresponding to the given hex-encoded `hash` /// - /// * `hash` - deploy hash which identifies the deploy pocessed - async fn get_deploy_processed_by_hash( + /// * `hash` - 
transaction hash which identifies the transaction pocessed + async fn get_transaction_processed_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result; + ) -> Result; /// Returns the [DeployExpired] corresponding to the given hex-encoded `hash` /// - /// * `hash` - deploy hash which identifies the deploy expired - async fn get_deploy_expired_by_hash( + /// * `hash` - transaction hash which identifies the transaction expired + async fn get_transaction_expired_by_hash( &self, + transaction_type: &TransactionTypeId, hash: &str, - ) -> Result; + ) -> Result; /// Returns all [Fault]s that correspond to the given hex-encoded `public_key` /// /// * `public_key` - key which identifies the fault @@ -323,11 +355,11 @@ pub enum DatabaseReadError { } #[derive(Debug, Deserialize, Serialize, Clone, ToSchema)] -pub struct DeployAggregate { - pub(crate) deploy_hash: String, - pub(crate) deploy_accepted: Option, - pub(crate) deploy_processed: Option, - pub(crate) deploy_expired: bool, +pub struct TransactionAggregate { + pub(crate) transaction_hash: String, + pub(crate) transaction_accepted: Option, + pub(crate) transaction_processed: Option, + pub(crate) transaction_expired: bool, } #[allow(dead_code)] //Allowing dead code here because the Raw enum is used only in ITs @@ -392,11 +424,25 @@ impl Migration { Migration { version: Some(1), statement_producers: |config: DDLConfiguration| { - let insert_types_stmt = - tables::event_type::create_initialise_stmt().map_err(|err| { - Error::msg(format!("Error building create_initialise_stmt: {:?}", err)) + let insert_event_types_stmt = tables::event_type::create_initialise_stmt() + .map_err(|err| { + Error::msg(format!( + "Error building event types insert statement: {:?}", + err + )) })?; - Ok(migration_1_ddl_statements(config, insert_types_stmt)) + let insert_transaction_types_stmt = + tables::transaction_type::create_initialise_stmt().map_err(|err| { + Error::msg(format!( + "Error building transaction types insert statement: {:?}", + err + )) + })?; + Ok(migration_1_ddl_statements( + config, + insert_event_types_stmt, + insert_transaction_types_stmt, + )) }, script_executor: None, } @@ -416,24 +462,29 @@ impl Migration { fn migration_1_ddl_statements( config: DDLConfiguration, - insert_types_stmt: sea_query::InsertStatement, + insert_event_types_stmt: sea_query::InsertStatement, + insert_transaction_types_stmt: sea_query::InsertStatement, ) -> Vec { - let init_stmt = StatementWrapper::InsertStatement(insert_types_stmt); vec![ // Synthetic tables StatementWrapper::TableCreateStatement(Box::new(tables::event_type::create_table_stmt())), + StatementWrapper::TableCreateStatement(Box::new( + tables::transaction_type::create_table_stmt(), + )), StatementWrapper::TableCreateStatement(Box::new(tables::event_log::create_table_stmt())), - StatementWrapper::TableCreateStatement(Box::new(tables::deploy_event::create_table_stmt())), + StatementWrapper::TableCreateStatement(Box::new( + tables::transaction_event::create_table_stmt(), + )), // Raw Event tables StatementWrapper::TableCreateStatement(Box::new(tables::block_added::create_table_stmt())), StatementWrapper::TableCreateStatement(Box::new( - tables::deploy_accepted::create_table_stmt(), + tables::transaction_accepted::create_table_stmt(), )), StatementWrapper::TableCreateStatement(Box::new( - tables::deploy_processed::create_table_stmt(), + tables::transaction_processed::create_table_stmt(), )), StatementWrapper::TableCreateStatement(Box::new( - tables::deploy_expired::create_table_stmt(), + 
tables::transaction_expired::create_table_stmt(), )), StatementWrapper::TableCreateStatement(Box::new(tables::fault::create_table_stmt( config.db_supports_unsigned, @@ -445,6 +496,7 @@ fn migration_1_ddl_statements( config.db_supports_unsigned, ))), StatementWrapper::TableCreateStatement(Box::new(tables::shutdown::create_table_stmt())), - init_stmt, + StatementWrapper::InsertStatement(insert_event_types_stmt), + StatementWrapper::InsertStatement(insert_transaction_types_stmt), ] } diff --git a/event_sidecar/src/types/sse_events.rs b/event_sidecar/src/types/sse_events.rs index 5e63a769..2eb31505 100644 --- a/event_sidecar/src/types/sse_events.rs +++ b/event_sidecar/src/types/sse_events.rs @@ -1,12 +1,19 @@ +use casper_types::FinalitySignature as FinSig; +use casper_types::{ + contract_messages::Messages, execution::ExecutionResult, AsymmetricType, Block, BlockHash, + EraId, InitiatorAddr, ProtocolVersion, PublicKey, TimeDiff, Timestamp, Transaction, + TransactionHash, +}; #[cfg(test)] -use casper_event_types::Digest; -use casper_event_types::{BlockHash, Deploy, DeployHash, FinalitySignature as FinSig, JsonBlock}; +use casper_types::ChainNameDigest; #[cfg(test)] -use casper_types::testing::TestRng; use casper_types::{ - AsymmetricType, EraId, ExecutionResult, ProtocolVersion, PublicKey, TimeDiff, Timestamp, + execution::{execution_result_v1::ExecutionResultV1, Effects, ExecutionResultV2}, + testing::TestRng, + TestBlockBuilder, TestBlockV1Builder, }; use derive_new::new; +use hex::ToHex; #[cfg(test)] use rand::Rng; use serde::{Deserialize, Serialize}; @@ -17,6 +24,8 @@ use std::{ }; use utoipa::ToSchema; +use crate::sql::tables::transaction_type::TransactionTypeId; + /// The version of this node's API server. This event will always be the first sent to a new /// client, and will have no associated event ID provided. #[derive(Clone, Debug, Serialize, Deserialize, new)] @@ -26,15 +35,40 @@ pub struct ApiVersion(ProtocolVersion); #[derive(Clone, Debug, Serialize, Deserialize, new, ToSchema)] pub struct BlockAdded { block_hash: BlockHash, - block: Box, + block: Box, +} + +#[cfg(test)] +pub fn random_execution_result(rng: &mut TestRng) -> ExecutionResult { + match rng.gen_range(0..2) { + 0 => { + let result_v1: ExecutionResultV1 = rng.gen(); + ExecutionResult::V1(result_v1) + } + 1 => { + let result_v2: ExecutionResultV2 = rng.gen(); + ExecutionResult::V2(result_v2) + } + _ => panic!("Unexpected value"), + } } #[cfg(test)] impl BlockAdded { pub fn random(rng: &mut TestRng) -> Self { - let block = JsonBlock::random(rng); + let block = match rng.gen_range(0..2) { + 0 => { + let block_v1 = TestBlockV1Builder::default().build(rng); + Block::V1(block_v1) + } + 1 => { + let block_v2 = TestBlockBuilder::default().build(rng); + Block::V2(block_v2) + } + _ => panic!("Unexpected value"), + }; Self { - block_hash: block.hash, + block_hash: *block.hash(), block: Box::new(block), } } @@ -46,86 +80,163 @@ impl BlockAdded { } pub fn get_height(&self) -> u64 { - self.block.header.height + self.block.height() } } -/// The given deploy has been newly-accepted by this node. +/// The given transaction has been newly-accepted by this node. #[derive(Clone, Debug, Serialize, Deserialize, new, ToSchema)] -pub struct DeployAccepted { - // It's an Arc to not create multiple copies of the same deploy for multiple subscribers. - deploy: Arc, +pub struct TransactionAccepted { + // It's an Arc to not create multiple copies of the same transaction for multiple subscribers. 
+ transaction: Arc, } -impl DeployAccepted { +impl TransactionAccepted { + pub fn identifier(&self) -> String { + transaction_hash_to_identifier(&self.transaction.hash()) + } + + pub fn transaction_type_id(&self) -> TransactionTypeId { + match *self.transaction { + Transaction::Deploy(_) => TransactionTypeId::Deploy, + Transaction::V1(_) => TransactionTypeId::Version1, + } + } + + #[cfg(test)] + pub fn api_transaction_type_id(&self) -> crate::types::database::TransactionTypeId { + match *self.transaction { + Transaction::Deploy(_) => crate::types::database::TransactionTypeId::Deploy, + Transaction::V1(_) => crate::types::database::TransactionTypeId::Version1, + } + } + #[cfg(test)] pub fn random(rng: &mut TestRng) -> Self { Self { - deploy: Arc::new(Deploy::random(rng)), + transaction: Arc::new(Transaction::random(rng)), } } #[cfg(test)] - pub fn deploy_hash(&self) -> DeployHash { - self.deploy.hash().to_owned() + pub fn transaction_hash(&self) -> TransactionHash { + self.transaction.hash().to_owned() } pub fn hex_encoded_hash(&self) -> String { - hex::encode(self.deploy.hash().inner()) + let hex_fmt: String = match self.transaction.hash() { + TransactionHash::Deploy(deploy) => deploy.encode_hex(), + TransactionHash::V1(transaction) => transaction.encode_hex(), + }; + hex_fmt } } -/// The given deploy has been executed, committed and forms part of the given block. +/// The given transaction has been executed, committed and forms part of the given block. #[derive(Clone, Debug, Serialize, Deserialize, new, ToSchema)] -pub struct DeployProcessed { - deploy_hash: Box, +pub struct TransactionProcessed { + transaction_hash: Box, #[schema(value_type = String)] - account: Box, + initiator_addr: Box, #[schema(value_type = String)] timestamp: Timestamp, #[schema(value_type = String)] ttl: TimeDiff, - dependencies: Vec, block_hash: Box, + //#[data_size(skip)] execution_result: Box, + messages: Messages, } -impl DeployProcessed { +impl TransactionProcessed { + pub fn identifier(&self) -> String { + transaction_hash_to_identifier(&self.transaction_hash) + } + + pub fn transaction_type_id(&self) -> TransactionTypeId { + match *self.transaction_hash.as_ref() { + TransactionHash::Deploy(_) => TransactionTypeId::Deploy, + TransactionHash::V1(_) => TransactionTypeId::Version1, + } + } + #[cfg(test)] - pub fn random(rng: &mut TestRng, with_deploy_hash: Option) -> Self { - let deploy = Deploy::random(rng); + pub fn api_transaction_type_id(&self) -> crate::types::database::TransactionTypeId { + match *self.transaction_hash.as_ref() { + TransactionHash::Deploy(_) => crate::types::database::TransactionTypeId::Deploy, + TransactionHash::V1(_) => crate::types::database::TransactionTypeId::Version1, + } + } + + #[cfg(test)] + pub fn random(rng: &mut TestRng, with_transaction_hash: Option) -> Self { + let transaction = Transaction::random(rng); + let ttl = match &transaction { + Transaction::Deploy(deploy) => deploy.ttl(), + Transaction::V1(transaction) => transaction.ttl(), + }; + let timestamp = match &transaction { + Transaction::Deploy(deploy) => deploy.timestamp(), + Transaction::V1(transaction) => transaction.timestamp(), + }; + let initiator_addr = Box::new(transaction.initiator_addr()); Self { - deploy_hash: Box::new(with_deploy_hash.unwrap_or(*deploy.hash())), - account: Box::new(deploy.header().account().clone()), - timestamp: deploy.header().timestamp(), - ttl: deploy.header().ttl(), - dependencies: deploy.header().dependencies().clone(), + transaction_hash: 
Box::new(with_transaction_hash.unwrap_or(transaction.hash())), + initiator_addr, + timestamp, + ttl, block_hash: Box::new(BlockHash::random(rng)), - execution_result: Box::new(rng.gen()), + execution_result: Box::new(random_execution_result(rng)), + messages: rng.random_vec(1..5), } } pub fn hex_encoded_hash(&self) -> String { - hex::encode(self.deploy_hash.inner()) + match *self.transaction_hash.as_ref() { + TransactionHash::Deploy(deploy_hash) => deploy_hash.encode_hex(), + TransactionHash::V1(v1_hash) => v1_hash.encode_hex(), + } } } -/// The given deploy has expired. +/// The given transaction has expired. #[derive(Clone, Debug, Serialize, Deserialize, new, ToSchema)] -pub struct DeployExpired { - deploy_hash: DeployHash, +pub struct TransactionExpired { + transaction_hash: TransactionHash, } -impl DeployExpired { +impl TransactionExpired { + pub fn identifier(&self) -> String { + transaction_hash_to_identifier(&self.transaction_hash) + } + + pub fn transaction_type_id(&self) -> TransactionTypeId { + match self.transaction_hash { + TransactionHash::Deploy(_) => TransactionTypeId::Deploy, + TransactionHash::V1(_) => TransactionTypeId::Version1, + } + } + #[cfg(test)] - pub fn random(rng: &mut TestRng, with_deploy_hash: Option) -> Self { + pub fn api_transaction_type_id(&self) -> crate::types::database::TransactionTypeId { + match self.transaction_hash { + TransactionHash::Deploy(_) => crate::types::database::TransactionTypeId::Deploy, + TransactionHash::V1(_) => crate::types::database::TransactionTypeId::Version1, + } + } + + #[cfg(test)] + pub fn random(rng: &mut TestRng, with_transaction_hash: Option) -> Self { Self { - deploy_hash: with_deploy_hash.unwrap_or_else(|| DeployHash::new(Digest::random(rng))), + transaction_hash: with_transaction_hash.unwrap_or_else(|| TransactionHash::random(rng)), } } pub fn hex_encoded_hash(&self) -> String { - hex::encode(self.deploy_hash.inner()) + match self.transaction_hash { + TransactionHash::Deploy(deploy_hash) => deploy_hash.encode_hex(), + TransactionHash::V1(v1_hash) => v1_hash.encode_hex(), + } } } @@ -162,12 +273,24 @@ impl Display for Fault { #[derive(Clone, Debug, Serialize, Deserialize, new)] pub struct FinalitySignature(Box); +impl From for FinSig { + fn from(val: FinalitySignature) -> Self { + *val.0 + } +} + impl FinalitySignature { #[cfg(test)] pub fn random(rng: &mut TestRng) -> Self { + let block_hash = BlockHash::random(rng); + let block_height = rng.gen::(); + let era_id = EraId::random(rng); + let chain_name_digest = ChainNameDigest::random(rng); Self(Box::new(FinSig::random_for_block( - BlockHash::random(rng), - rng.gen(), + block_hash, + block_height, + era_id, + chain_name_digest, rng, ))) } @@ -200,14 +323,17 @@ impl Step { pub fn random(rng: &mut TestRng) -> Self { use serde_json::value::to_raw_value; - let execution_effect = match rng.gen::() { - ExecutionResult::Success { effect, .. } | ExecutionResult::Failure { effect, .. 
} => { - effect - } - }; + let execution_effect = Effects::random(rng); Self { era_id: EraId::new(rng.gen()), execution_effect: to_raw_value(&execution_effect).unwrap(), } } } + +fn transaction_hash_to_identifier(transaction_hash: &TransactionHash) -> String { + match transaction_hash { + TransactionHash::Deploy(deploy) => hex::encode(deploy.inner()), + TransactionHash::V1(transaction) => hex::encode(transaction.inner()), + } +} diff --git a/listener/src/connection_manager.rs b/listener/src/connection_manager.rs index 2c71c221..5aaa84c2 100644 --- a/listener/src/connection_manager.rs +++ b/listener/src/connection_manager.rs @@ -375,7 +375,7 @@ pub mod tests { sse_connector::{tests::MockSseConnection, StreamConnector}, SseEvent, }; - use anyhow::Error; + use anyhow::anyhow; use casper_event_types::{sse_data::test_support::*, Filter}; use std::time::Duration; use tokio::{ @@ -411,8 +411,8 @@ pub mod tests { #[tokio::test] async fn given_data_without_api_version_should_fail() { let data = vec![ - example_block_added_1_5_2(BLOCK_HASH_1, "1"), - example_block_added_1_5_2(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_1, "1"), + example_block_added_2_0_0(BLOCK_HASH_2, "2"), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, _, _) = build_manager(connector); @@ -430,8 +430,8 @@ pub mod tests { async fn given_data_should_pass_data() { let data = vec![ example_api_version(), - example_block_added_1_5_2(BLOCK_HASH_1, "1"), - example_block_added_1_5_2(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_1, "1"), + example_block_added_2_0_0(BLOCK_HASH_2, "2"), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, data_tx, event_ids) = build_manager(connector); @@ -449,7 +449,7 @@ pub mod tests { let data = vec![ example_api_version(), "XYZ".to_string(), - example_block_added_1_5_2(BLOCK_HASH_2, "2"), + example_block_added_2_0_0(BLOCK_HASH_2, "2"), ]; let connector = Box::new(MockSseConnection::build_with_data(data)); let (mut connection_manager, data_tx, _event_ids) = build_manager(connector); @@ -493,7 +493,7 @@ pub mod tests { current_event_id: None, sse_event_sender: data_tx, maybe_tasks: None, - filter: Filter::Sigs, + filter: Filter::Events, current_event_id_sender: event_id_tx, api_version: None, }; @@ -521,8 +521,8 @@ pub mod tests { msg, } } - pub fn fail_fast(sender: Sender) -> Self { - let error = Error::msg("xyz"); + pub fn fail_fast(msg_postfix: &str, sender: Sender) -> Self { + let error = anyhow!("xyz-{}", msg_postfix); let a = Err(ConnectionManagerError::NonRecoverableError { error }); Self::new(Duration::from_millis(1), a, sender, None) } diff --git a/listener/src/connections_builder.rs b/listener/src/connections_builder.rs index 642e47c2..1893308a 100644 --- a/listener/src/connections_builder.rs +++ b/listener/src/connections_builder.rs @@ -95,7 +95,7 @@ impl DefaultConnectionsBuilder { } fn filters_from_version(_build_version: ProtocolVersion) -> Vec { - vec![Filter::Main, Filter::Sigs, Filter::Deploys] + vec![Filter::Events] } pub struct ConnectionConfig { @@ -219,15 +219,7 @@ pub mod tests { tx.clone(), Some(events_msg.as_str()), )); - let main_msg = format!("main-{}", msg_postfix); - let main: Box = Box::new(MockConnectionManager::ok_long( - tx.clone(), - Some(main_msg.as_str()), - )); - Ok(HashMap::from([ - (Filter::Events, events), - (Filter::Main, main), - ])) + Ok(HashMap::from([(Filter::Events, events)])) } fn response_with_failing_events( @@ -235,16 +227,8 @@ pub 
mod tests { tx: &Sender, ) -> Result>, Error> { let events: Box = - Box::new(MockConnectionManager::fail_fast(tx.clone())); - let main_msg = format!("main-{}", msg_postfix); - let main: Box = Box::new(MockConnectionManager::ok_long( - tx.clone(), - Some(main_msg.as_str()), - )); - Ok(HashMap::from([ - (Filter::Events, events), - (Filter::Main, main), - ])) + Box::new(MockConnectionManager::fail_fast(msg_postfix, tx.clone())); + Ok(HashMap::from([(Filter::Events, events)])) } #[async_trait] diff --git a/listener/src/lib.rs b/listener/src/lib.rs index 29149a29..6cc6f572 100644 --- a/listener/src/lib.rs +++ b/listener/src/lib.rs @@ -207,8 +207,9 @@ impl EventListener { match err { ConnectionManagerError::NonRecoverableError { error } => { error!( - "Restarting event listener {} because of NonRecoverableError: {}", + "Restarting event listener {}:{} because of NonRecoverableError: {}", self.node.ip_address.to_string(), + self.node.sse_port, error ); log_status_for_event_listener(EventListenerStatus::Reconnecting, self); @@ -217,7 +218,7 @@ impl EventListener { ConnectionManagerError::InitialConnectionError { error } => { //No futures_left means no more filters active, we need to restart the whole listener if futures_left.is_empty() { - error!("Restarting event listener {} because of no more active connections left: {}", self.node.ip_address.to_string(), error); + error!("Restarting event listener {}:{} because of no more active connections left: {}", self.node.ip_address.to_string(), self.node.sse_port, error); log_status_for_event_listener( EventListenerStatus::Reconnecting, self, @@ -355,22 +356,9 @@ mod tests { let err = run_event_listener(2, version_fetcher, connections_builder.clone(), true).await; - let received_data = connections_builder.get_received_data().await; - assert_eq!(received_data.len(), 2); - assert!(set_contains(received_data, vec!["main-1", "events-1"],)); - assert!(err.to_string().contains("Max connection attempts reached")); - } - - #[tokio::test] - async fn given_event_listener_should_fail_when_one_connection_manager_fails_other_does_not() { - let version_fetcher = MockVersionFetcher::repeatable_from_protocol_version("1.5.10"); - let connections_builder = Arc::new(MockConnectionsBuilder::one_fails_immediatly()); - - let err = run_event_listener(1, version_fetcher, connections_builder.clone(), true).await; - let received_data = connections_builder.get_received_data().await; assert_eq!(received_data.len(), 1); - assert!(set_contains(received_data, vec!["main-1"],)); + assert!(set_contains(received_data, vec!["events-1"],)); assert!(err.to_string().contains("Max connection attempts reached")); } @@ -388,14 +376,14 @@ mod tests { #[tokio::test] async fn given_event_listener_should_fetch_data_if_enough_reconnections() { - let version_fetcher = MockVersionFetcher::repeatable_from_protocol_version("1.5.10"); + let version_fetcher = MockVersionFetcher::repeatable_from_protocol_version("2.0.0"); let connections_builder = Arc::new(MockConnectionsBuilder::ok_after_two_fails()); let err = run_event_listener(3, version_fetcher, connections_builder.clone(), true).await; let received_data = connections_builder.get_received_data().await; - assert_eq!(received_data.len(), 2); - assert!(set_contains(received_data, vec!["main-2", "events-2"],)); + assert_eq!(received_data.len(), 1); + assert!(set_contains(received_data, vec!["events-2"],)); assert!(err.to_string().contains("Max connection attempts reached")); } diff --git a/resources/test/rpc_schema.json b/resources/test/rpc_schema.json 
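Note: with a 2.0.0 node the listener keeps a single SSE connection per node on the consolidated events endpoint, so the former Main/Sigs/Deploys filters collapse into `Filter::Events` and the test expectations above shrink to one received message per attempt. The sketch below condenses the mocked connection map used in those tests; the `SseEvent` channel item type and the `dyn ConnectionManager` trait object are assumptions reconstructed from the surrounding test module, not guaranteed signatures.

    use std::collections::HashMap;
    use tokio::sync::mpsc::Sender;

    // One connection manager keyed by Filter::Events replaces the former
    // Main/Sigs/Deploys entries (condensed sketch of the test helper above).
    fn mock_connections(
        tx: &Sender<SseEvent>,
    ) -> HashMap<Filter, Box<dyn ConnectionManager>> {
        let events: Box<dyn ConnectionManager> =
            Box::new(MockConnectionManager::ok_long(tx.clone(), Some("events-1")));
        HashMap::from([(Filter::Events, events)])
    }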
index 0878d503..314d981d 100644 --- a/resources/test/rpc_schema.json +++ b/resources/test/rpc_schema.json @@ -2912,19 +2912,6 @@ } }, "additionalProperties": false - }, - { - "description": "Hex-encoded entity address of the initiator.", - "type": "object", - "required": [ - "EntityAddr" - ], - "properties": { - "EntityAddr": { - "type": "string" - } - }, - "additionalProperties": false } ] }, @@ -5013,6 +5000,19 @@ } }, "additionalProperties": false + }, + { + "description": "A NamedKey record.", + "type": "object", + "required": [ + "NamedKey" + ], + "properties": { + "NamedKey": { + "$ref": "#/components/schemas/NamedKeyValue" + } + }, + "additionalProperties": false } ] }, @@ -5442,32 +5442,32 @@ "action_thresholds", "associated_keys", "byte_code_hash", + "entity_kind", "entry_points", "main_purse", "message_topics", - "named_keys", "package_hash", "protocol_version" ], "properties": { + "protocol_version": { + "$ref": "#/components/schemas/ProtocolVersion" + }, + "entity_kind": { + "$ref": "#/components/schemas/EntityKind" + }, "package_hash": { "$ref": "#/components/schemas/PackageHash" }, "byte_code_hash": { "$ref": "#/components/schemas/ByteCodeHash" }, - "named_keys": { - "$ref": "#/components/schemas/NamedKeys" + "main_purse": { + "$ref": "#/components/schemas/URef" }, "entry_points": { "$ref": "#/components/schemas/Array_of_NamedEntryPoint" }, - "protocol_version": { - "$ref": "#/components/schemas/ProtocolVersion" - }, - "main_purse": { - "$ref": "#/components/schemas/URef" - }, "associated_keys": { "$ref": "#/components/schemas/EntityAssociatedKeys" }, @@ -5479,6 +5479,77 @@ } } }, + "EntityKind": { + "description": "The type of Package.", + "oneOf": [ + { + "description": "Package associated with a native contract implementation.", + "type": "object", + "required": [ + "System" + ], + "properties": { + "System": { + "$ref": "#/components/schemas/SystemEntityType" + } + }, + "additionalProperties": false + }, + { + "description": "Package associated with an Account hash.", + "type": "object", + "required": [ + "Account" + ], + "properties": { + "Account": { + "$ref": "#/components/schemas/AccountHash" + } + }, + "additionalProperties": false + }, + { + "description": "Packages associated with Wasm stored on chain.", + "type": "string", + "enum": [ + "SmartContract" + ] + } + ] + }, + "SystemEntityType": { + "description": "System contract types.\n\nUsed by converting to a `u32` and passing as the `system_contract_index` argument of `ext_ffi::casper_get_system_contract()`.", + "oneOf": [ + { + "description": "Mint contract.", + "type": "string", + "enum": [ + "Mint" + ] + }, + { + "description": "Handle Payment contract.", + "type": "string", + "enum": [ + "HandlePayment" + ] + }, + { + "description": "Standard Payment contract.", + "type": "string", + "enum": [ + "StandardPayment" + ] + }, + { + "description": "Auction contract.", + "type": "string", + "enum": [ + "Auction" + ] + } + ] + }, "ByteCodeHash": { "description": "The hash address of the contract wasm", "type": "string" @@ -5569,7 +5640,6 @@ "disabled_versions", "groups", "lock_status", - "package_kind", "versions" ], "properties": { @@ -5612,14 +5682,6 @@ "$ref": "#/components/schemas/PackageStatus" } ] - }, - "package_kind": { - "description": "The kind of package.", - "allOf": [ - { - "$ref": "#/components/schemas/PackageKind" - } - ] } } }, @@ -5693,77 +5755,6 @@ } ] }, - "PackageKind": { - "description": "The type of Package.", - "oneOf": [ - { - "description": "Package associated with a native contract 
implementation.", - "type": "object", - "required": [ - "System" - ], - "properties": { - "System": { - "$ref": "#/components/schemas/SystemEntityType" - } - }, - "additionalProperties": false - }, - { - "description": "Package associated with an Account hash.", - "type": "object", - "required": [ - "Account" - ], - "properties": { - "Account": { - "$ref": "#/components/schemas/AccountHash" - } - }, - "additionalProperties": false - }, - { - "description": "Packages associated with Wasm stored on chain.", - "type": "string", - "enum": [ - "SmartContract" - ] - } - ] - }, - "SystemEntityType": { - "description": "System contract types.\n\nUsed by converting to a `u32` and passing as the `system_contract_index` argument of `ext_ffi::casper_get_system_contract()`.", - "oneOf": [ - { - "description": "Mint contract.", - "type": "string", - "enum": [ - "Mint" - ] - }, - { - "description": "Handle Payment contract.", - "type": "string", - "enum": [ - "HandlePayment" - ] - }, - { - "description": "Standard Payment contract.", - "type": "string", - "enum": [ - "StandardPayment" - ] - }, - { - "description": "Auction contract.", - "type": "string", - "enum": [ - "Auction" - ] - } - ] - }, "ByteCode": { "description": "A container for contract's Wasm bytes.", "type": "object", @@ -5833,6 +5824,32 @@ "description": "Message checksum as a formatted string.", "type": "string" }, + "NamedKeyValue": { + "description": "A NamedKey value.", + "type": "object", + "required": [ + "name", + "named_key" + ], + "properties": { + "named_key": { + "description": "The actual `Key` encoded as a CLValue.", + "allOf": [ + { + "$ref": "#/components/schemas/CLValue" + } + ] + }, + "name": { + "description": "The name of the `Key` encoded as a CLValue.", + "allOf": [ + { + "$ref": "#/components/schemas/CLValue" + } + ] + } + } + }, "TransformError": { "description": "Error type for applying and combining transforms.\n\nA `TypeMismatch` occurs when a transform cannot be applied because the types are not compatible (e.g. 
trying to add a number to a string).", "oneOf": [ diff --git a/rpc_sidecar/Cargo.toml b/rpc_sidecar/Cargo.toml index 46f1ce52..7453d3f5 100644 --- a/rpc_sidecar/Cargo.toml +++ b/rpc_sidecar/Cargo.toml @@ -17,7 +17,7 @@ backtrace = "0.3.50" base16 = "0.2.1" bincode = "1" casper-json-rpc = { version = "1.0.0", path = "../json_rpc" } -casper-types-ver-2_0 = { workspace = true, features = ["datasize", "json-schema", "std"] } +casper-types = { workspace = true, features = ["datasize", "json-schema", "std"] } datasize = { workspace = true, features = ["detailed", "fake_clock-types"] } futures = { workspace = true } http = "0.2.1" @@ -41,7 +41,7 @@ warp = { version = "0.3.6", features = ["compression"] } [dev-dependencies] assert-json-diff = "2" bytes = "1.5.0" -casper-types-ver-2_0 = { workspace = true, features = ["datasize", "json-schema", "std", "testing"] } +casper-types = { workspace = true, features = ["datasize", "json-schema", "std", "testing"] } portpicker = "0.1.1" pretty_assertions = "0.7.2" regex = "1" @@ -55,7 +55,7 @@ vergen = { version = "8.2.1", default-features = false, features = [ ] } [features] -testing = ["casper-types-ver-2_0/testing"] +testing = ["casper-types/testing"] [package.metadata.deb] revision = "0" diff --git a/rpc_sidecar/src/lib.rs b/rpc_sidecar/src/lib.rs index 0a3035aa..4c52bc3f 100644 --- a/rpc_sidecar/src/lib.rs +++ b/rpc_sidecar/src/lib.rs @@ -8,7 +8,7 @@ mod speculative_exec_server; pub(crate) mod testing; use anyhow::Error; -use casper_types_ver_2_0::ProtocolVersion; +use casper_types::ProtocolVersion; pub use config::{FieldParseError, RpcServerConfig, RpcServerConfigTarget}; pub use config::{NodeClientConfig, RpcConfig}; use futures::FutureExt; diff --git a/rpc_sidecar/src/node_client.rs b/rpc_sidecar/src/node_client.rs index 29f4bf16..34e54293 100644 --- a/rpc_sidecar/src/node_client.rs +++ b/rpc_sidecar/src/node_client.rs @@ -10,7 +10,7 @@ use std::{ }; use crate::{config::ExponentialBackoffConfig, NodeClientConfig, SUPPORTED_PROTOCOL_VERSION}; -use casper_types_ver_2_0::{ +use casper_types::{ binary_port::{ BinaryRequest, BinaryRequestHeader, BinaryResponse, BinaryResponseAndRequest, ConsensusValidatorChanges, ErrorCode as BinaryPortError, GetRequest, GetTrieFullResult, @@ -443,8 +443,8 @@ mod tests { use crate::testing::BinaryPortMock; use super::*; - use casper_types_ver_2_0::testing::TestRng; - use casper_types_ver_2_0::{CLValue, SemVer}; + use casper_types::testing::TestRng; + use casper_types::{CLValue, SemVer}; use futures::FutureExt; use tokio::task::JoinHandle; use tokio::time::sleep; diff --git a/rpc_sidecar/src/rpcs.rs b/rpc_sidecar/src/rpcs.rs index a1c177d5..9eb3a479 100644 --- a/rpc_sidecar/src/rpcs.rs +++ b/rpc_sidecar/src/rpcs.rs @@ -29,7 +29,7 @@ use casper_json_rpc::{ CorsOrigin, Error as RpcError, Params, RequestHandlers, RequestHandlersBuilder, ReservedErrorCode, }; -use casper_types_ver_2_0::SemVer; +use casper_types::SemVer; pub use common::ErrorData; use docs::DocExample; @@ -392,7 +392,7 @@ mod tests { use warp::{filters::BoxedFilter, Filter, Reply}; use casper_json_rpc::{filters, Response}; - use casper_types_ver_2_0::DeployHash; + use casper_types::DeployHash; use super::*; @@ -544,7 +544,7 @@ mod tests { } mod rpc_with_optional_params { - use casper_types_ver_2_0::BlockIdentifier; + use casper_types::BlockIdentifier; use crate::rpcs::chain::{GetBlock, GetBlockParams, GetBlockResult}; diff --git a/rpc_sidecar/src/rpcs/account.rs b/rpc_sidecar/src/rpcs/account.rs index d18ad81e..a22cc403 100644 --- 
a/rpc_sidecar/src/rpcs/account.rs +++ b/rpc_sidecar/src/rpcs/account.rs @@ -7,7 +7,7 @@ use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use casper_types_ver_2_0::{Deploy, DeployHash, Transaction, TransactionHash}; +use casper_types::{Deploy, DeployHash, Transaction, TransactionHash}; use super::{ docs::{DocExample, DOCS_EXAMPLE_API_VERSION}, @@ -151,7 +151,7 @@ impl RpcWithParams for PutTransaction { #[cfg(test)] mod tests { - use casper_types_ver_2_0::{ + use casper_types::{ binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, ErrorCode as BinaryPortErrorCode, diff --git a/rpc_sidecar/src/rpcs/chain.rs b/rpc_sidecar/src/rpcs/chain.rs index 3c4593bf..9d76ca86 100644 --- a/rpc_sidecar/src/rpcs/chain.rs +++ b/rpc_sidecar/src/rpcs/chain.rs @@ -9,7 +9,7 @@ use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use casper_types_ver_2_0::{ +use casper_types::{ BlockHash, BlockHeader, BlockHeaderV2, BlockIdentifier, Digest, GlobalStateIdentifier, JsonBlockWithSignatures, Key, StoredValue, Transfer, }; @@ -394,14 +394,11 @@ mod tests { use std::convert::TryFrom; use crate::{ClientError, SUPPORTED_PROTOCOL_VERSION}; - use casper_types_ver_2_0::{ + use casper_types::{ binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, GetRequest, GlobalStateQueryResult, GlobalStateRequest, InformationRequestTag, RecordId, - }, - system::auction::EraInfo, - testing::TestRng, - Block, BlockSignatures, DeployHash, SignedBlock, TestBlockBuilder, TestBlockV1Builder, + }, system::auction::EraInfo, testing::TestRng, Block, BlockSignaturesV1, BlockSignaturesV2, ChainNameDigest, DeployHash, SignedBlock, TestBlockBuilder, TestBlockV1Builder }; use rand::Rng; @@ -412,12 +409,12 @@ mod tests { async fn should_read_block_v2() { let rng = &mut TestRng::new(); let block = Block::V2(TestBlockBuilder::new().build(rng)); - + let signatures = BlockSignaturesV2::new(*block.hash(), block.height(), block.era_id(), ChainNameDigest::random(rng)); let resp = GetBlock::do_handle_request( Arc::new(ValidBlockMock { block: SignedBlock::new( block.clone(), - BlockSignatures::new(*block.hash(), block.era_id()), + signatures.into(), ), transfers: vec![], }), @@ -444,7 +441,7 @@ mod tests { Arc::new(ValidBlockMock { block: SignedBlock::new( Block::V1(block.clone()), - BlockSignatures::new(*block.hash(), block.era_id()), + BlockSignaturesV1::new(*block.hash(), block.era_id()).into(), ), transfers: vec![], }), @@ -480,12 +477,12 @@ mod tests { Some(rng.gen()), )); } - + let signatures = BlockSignaturesV2::new(*block.hash(), block.height(), block.era_id(), ChainNameDigest::random(rng)); let resp = GetBlockTransfers::do_handle_request( Arc::new(ValidBlockMock { block: SignedBlock::new( Block::V2(block.clone()), - BlockSignatures::new(*block.hash(), block.era_id()), + signatures.into(), ), transfers: transfers.clone(), }), @@ -509,11 +506,12 @@ mod tests { let rng = &mut TestRng::new(); let block = TestBlockBuilder::new().build(rng); + let signatures = BlockSignaturesV2::new(*block.hash(), block.height(), block.era_id(), ChainNameDigest::random(rng)); let resp = GetStateRootHash::do_handle_request( Arc::new(ValidBlockMock { block: SignedBlock::new( Block::V2(block.clone()), - BlockSignatures::new(*block.hash(), block.era_id()), + signatures.into(), ), transfers: vec![], }), diff --git a/rpc_sidecar/src/rpcs/chain/era_summary.rs b/rpc_sidecar/src/rpcs/chain/era_summary.rs index bd861b38..9a69c46d 100644 --- 
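Note: the chain RPC tests above now pair each block version with its matching signature type; `BlockSignaturesV2` additionally takes the block height and a chain-name digest, while V1 blocks keep the two-argument constructor. A condensed sketch of the pattern, using only calls that appear in the hunks above (`block_v1` stands for the V1 test block):

    // V2: signatures carry hash, height, era id and chain-name digest.
    let block = Block::V2(TestBlockBuilder::new().build(rng));
    let signatures = BlockSignaturesV2::new(
        *block.hash(),
        block.height(),
        block.era_id(),
        ChainNameDigest::random(rng),
    );
    let signed_v2 = SignedBlock::new(block.clone(), signatures.into());

    // V1: the old-style two-argument constructor survives as BlockSignaturesV1.
    let signed_v1 = SignedBlock::new(
        Block::V1(block_v1.clone()),
        BlockSignaturesV1::new(*block_v1.hash(), block_v1.era_id()).into(),
    );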
a/rpc_sidecar/src/rpcs/chain/era_summary.rs +++ b/rpc_sidecar/src/rpcs/chain/era_summary.rs @@ -2,7 +2,7 @@ use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use casper_types_ver_2_0::{ +use casper_types::{ system::auction::{EraInfo, SeigniorageAllocation}, AsymmetricType, BlockHash, BlockV2, Digest, EraId, PublicKey, StoredValue, U512, }; diff --git a/rpc_sidecar/src/rpcs/common.rs b/rpc_sidecar/src/rpcs/common.rs index 913bd661..36f5d503 100644 --- a/rpc_sidecar/src/rpcs/common.rs +++ b/rpc_sidecar/src/rpcs/common.rs @@ -3,7 +3,7 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::rpcs::error::Error; -use casper_types_ver_2_0::{ +use casper_types::{ account::AccountHash, AddressableEntity, AvailableBlockRange, BlockHeader, BlockIdentifier, GlobalStateIdentifier, Key, SignedBlock, StoredValue, URef, U512, }; diff --git a/rpc_sidecar/src/rpcs/docs.rs b/rpc_sidecar/src/rpcs/docs.rs index 9a4ea782..04668719 100644 --- a/rpc_sidecar/src/rpcs/docs.rs +++ b/rpc_sidecar/src/rpcs/docs.rs @@ -462,7 +462,7 @@ impl RpcWithoutParams for ListRpcs { } mod doc_example_impls { - use casper_types_ver_2_0::{ + use casper_types::{ account::Account, AuctionState, Deploy, EraEndV1, EraEndV2, EraReport, PublicKey, Timestamp, Transaction, }; diff --git a/rpc_sidecar/src/rpcs/error.rs b/rpc_sidecar/src/rpcs/error.rs index 30391376..afce7820 100644 --- a/rpc_sidecar/src/rpcs/error.rs +++ b/rpc_sidecar/src/rpcs/error.rs @@ -1,6 +1,6 @@ use crate::node_client::Error as NodeClientError; use casper_json_rpc::Error as RpcError; -use casper_types_ver_2_0::{ +use casper_types::{ AvailableBlockRange, BlockIdentifier, DeployHash, KeyFromStrError, KeyTag, TransactionHash, URefFromStrError, }; diff --git a/rpc_sidecar/src/rpcs/info.rs b/rpc_sidecar/src/rpcs/info.rs index e2f7fd6d..b727c363 100644 --- a/rpc_sidecar/src/rpcs/info.rs +++ b/rpc_sidecar/src/rpcs/info.rs @@ -7,7 +7,7 @@ use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use casper_types_ver_2_0::{ +use casper_types::{ binary_port::MinimalBlockInfo, execution::{ExecutionResult, ExecutionResultV2}, ActivationPoint, AvailableBlockRange, Block, BlockSynchronizerStatus, ChainspecRawBytes, @@ -519,11 +519,12 @@ mod tests { use std::convert::TryFrom; use crate::{rpcs::ErrorCode, ClientError, SUPPORTED_PROTOCOL_VERSION}; - use casper_types_ver_2_0::{ + use casper_types::{ binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, GetRequest, InformationRequestTag, TransactionWithExecutionInfo, }, + bytesrepr::{FromBytes, ToBytes}, testing::TestRng, BlockHash, TransactionV1, }; @@ -543,12 +544,12 @@ mod tests { }; let resp = GetTransaction::do_handle_request( - Arc::new(ValidTransactionMock { - transaction: TransactionWithExecutionInfo::new( + Arc::new(ValidTransactionMock::new( + TransactionWithExecutionInfo::new( transaction.clone(), Some(execution_info.clone()), ), - }), + )), GetTransactionParams { transaction_hash: transaction.hash(), finalized_approvals: true, @@ -578,12 +579,12 @@ mod tests { }; let resp = GetTransaction::do_handle_request( - Arc::new(ValidTransactionMock { - transaction: TransactionWithExecutionInfo::new( + Arc::new(ValidTransactionMock::new( + TransactionWithExecutionInfo::new( Transaction::Deploy(deploy.clone()), Some(execution_info.clone()), ), - }), + )), GetTransactionParams { transaction_hash: deploy.hash().into(), finalized_approvals: true, @@ -613,12 +614,12 @@ mod tests { }; let resp = GetDeploy::do_handle_request( - 
Arc::new(ValidTransactionMock { - transaction: TransactionWithExecutionInfo::new( + Arc::new(ValidTransactionMock::new( + TransactionWithExecutionInfo::new( Transaction::Deploy(deploy.clone()), Some(execution_info.clone()), ), - }), + )), GetDeployParams { deploy_hash: *deploy.hash(), finalized_approvals: true, @@ -648,12 +649,12 @@ mod tests { }; let err = GetDeploy::do_handle_request( - Arc::new(ValidTransactionMock { - transaction: TransactionWithExecutionInfo::new( + Arc::new(ValidTransactionMock::new( + TransactionWithExecutionInfo::new( Transaction::V1(transaction.clone()), Some(execution_info.clone()), ), - }), + )), GetDeployParams { deploy_hash: DeployHash::new(*transaction.hash().inner()), finalized_approvals: true, @@ -666,7 +667,14 @@ mod tests { } struct ValidTransactionMock { - transaction: TransactionWithExecutionInfo, + transaction_bytes: Vec, + } + + impl ValidTransactionMock { + fn new(info: TransactionWithExecutionInfo) -> Self { + let transaction_bytes = info.to_bytes().expect("should serialize transaction"); + ValidTransactionMock { transaction_bytes } + } } #[async_trait] @@ -680,11 +688,11 @@ mod tests { if InformationRequestTag::try_from(info_type_tag) == Ok(InformationRequestTag::Transaction) => { + let (transaction, _) = + TransactionWithExecutionInfo::from_bytes(&self.transaction_bytes) + .expect("should deserialize transaction"); Ok(BinaryResponseAndRequest::new( - BinaryResponse::from_value( - self.transaction.clone(), - SUPPORTED_PROTOCOL_VERSION, - ), + BinaryResponse::from_value(transaction, SUPPORTED_PROTOCOL_VERSION), &[], )) } diff --git a/rpc_sidecar/src/rpcs/speculative_exec.rs b/rpc_sidecar/src/rpcs/speculative_exec.rs index c3fc5d97..347f05dd 100644 --- a/rpc_sidecar/src/rpcs/speculative_exec.rs +++ b/rpc_sidecar/src/rpcs/speculative_exec.rs @@ -7,7 +7,7 @@ use once_cell::sync::Lazy; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use casper_types_ver_2_0::{ +use casper_types::{ contract_messages::Messages, execution::ExecutionResultV2, BlockHash, BlockIdentifier, Deploy, Transaction, }; @@ -158,7 +158,7 @@ async fn handle_request( mod tests { use std::convert::TryFrom; - use casper_types_ver_2_0::{ + use casper_types::{ binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, GetRequest, InformationRequestTag, SpeculativeExecutionResult, diff --git a/rpc_sidecar/src/rpcs/state.rs b/rpc_sidecar/src/rpcs/state.rs index e614a37f..77d8689c 100644 --- a/rpc_sidecar/src/rpcs/state.rs +++ b/rpc_sidecar/src/rpcs/state.rs @@ -14,10 +14,10 @@ use super::{ ApiVersion, Error, NodeClient, RpcError, RpcWithOptionalParams, RpcWithParams, CURRENT_API_VERSION, }; -use casper_types_ver_2_0::{ +use casper_types::{ account::{Account, AccountHash}, + addressable_entity::EntityKindTag, bytesrepr::Bytes, - package::PackageKindTag, system::{ auction::{ EraValidators, SeigniorageRecipientsSnapshot, ValidatorWeights, @@ -312,7 +312,7 @@ impl RpcWithOptionalParams for GetAuctionInfo { .collect::, Error>>()?; let (registry_value, _) = node_client - .query_global_state(state_identifier, Key::SystemContractRegistry, vec![]) + .query_global_state(state_identifier, Key::SystemEntityRegistry, vec![]) .await .map_err(|err| Error::NodeRequest("system contract registry", err))? .ok_or(Error::GlobalStateEntryNotFound)? 
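Note: the auction-info path above reflects two renames in casper-types: the registry key is now `Key::SystemEntityRegistry` (was `SystemContractRegistry`) and addressable-entity keys are derived from `EntityKindTag` (was `PackageKindTag`). A condensed sketch of the lookup, using only calls shown in this hunk and the next one (`auction_hash` comes from the decoded registry, elided here):

    // Fetch the system registry under its new key ...
    let (registry_value, _) = node_client
        .query_global_state(state_identifier, Key::SystemEntityRegistry, vec![])
        .await
        .map_err(|err| Error::NodeRequest("system contract registry", err))?
        .ok_or(Error::GlobalStateEntryNotFound)?;
    // ... decode it and pick the auction hash (unchanged, elided) ...
    // ... then address the auction entity with the renamed tag.
    let auction_key = Key::addressable_entity_key(EntityKindTag::System, auction_hash);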
@@ -324,7 +324,7 @@ impl RpcWithOptionalParams for GetAuctionInfo { .map_err(|_| Error::InvalidAuctionContract)?; let &auction_hash = registry.get(AUCTION).ok_or(Error::InvalidAuctionContract)?; - let auction_key = Key::addressable_entity_key(PackageKindTag::System, auction_hash); + let auction_key = Key::addressable_entity_key(EntityKindTag::System, auction_hash); let (snapshot_value, _) = node_client .query_global_state( state_identifier, @@ -491,7 +491,7 @@ impl DictionaryIdentifier { } => { let named_keys = match &maybe_stored_value { Some(StoredValue::Account(account)) => account.named_keys(), - Some(StoredValue::AddressableEntity(contract)) => contract.named_keys(), + Some(StoredValue::Contract(contract)) => contract.named_keys(), Some(other) => { return Err(Error::InvalidTypeUnderDictionaryKey(other.type_name())) } @@ -838,15 +838,17 @@ mod tests { use std::{convert::TryFrom, iter}; use crate::{ClientError, SUPPORTED_PROTOCOL_VERSION}; - use casper_types_ver_2_0::{ - addressable_entity::{ActionThresholds, AssociatedKeys, MessageTopics, NamedKeys}, + use casper_types::{ + addressable_entity::{ + ActionThresholds, AssociatedKeys, EntityKindTag, MessageTopics, NamedKeys, + }, binary_port::{ BinaryRequest, BinaryResponse, BinaryResponseAndRequest, GetRequest, GlobalStateQueryResult, GlobalStateRequest, InformationRequestTag, }, system::auction::BidKind, testing::TestRng, - AccessRights, AddressableEntity, Block, ByteCodeHash, EntryPoints, PackageHash, + AccessRights, AddressableEntity, Block, ByteCodeHash, EntityKind, EntryPoints, PackageHash, ProtocolVersion, TestBlockBuilder, }; use rand::Rng; @@ -955,7 +957,7 @@ mod tests { )) } BinaryRequest::Get(GetRequest::State(GlobalStateRequest::Item { - base_key: Key::SystemContractRegistry, + base_key: Key::SystemEntityRegistry, .. })) => { let system_contracts = @@ -971,7 +973,7 @@ mod tests { )) } BinaryRequest::Get(GetRequest::State(GlobalStateRequest::Item { - base_key: Key::AddressableEntity(_, _), + base_key: Key::AddressableEntity(_), .. })) => { let result = GlobalStateQueryResult::new( @@ -1119,7 +1121,7 @@ mod tests { #[tokio::test] async fn should_read_query_balance_by_account_result() { - use casper_types_ver_2_0::account::{ActionThresholds, AssociatedKeys}; + use casper_types::account::{ActionThresholds, AssociatedKeys}; struct ClientMock { block: Block, @@ -1247,7 +1249,7 @@ mod tests { .. })) => { let key = - Key::addressable_entity_key(PackageKindTag::Account, self.entity_hash); + Key::addressable_entity_key(EntityKindTag::Account, self.entity_hash); let value = CLValue::from_t(key).unwrap(); Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value( @@ -1261,7 +1263,7 @@ mod tests { )) } BinaryRequest::Get(GetRequest::State(GlobalStateRequest::Item { - base_key: Key::AddressableEntity(_, _), + base_key: Key::AddressableEntity(_), .. 
})) => Ok(BinaryResponseAndRequest::new( BinaryResponse::from_value( @@ -1296,13 +1298,13 @@ mod tests { let entity = AddressableEntity::new( PackageHash::new(rng.gen()), ByteCodeHash::new(rng.gen()), - NamedKeys::default(), EntryPoints::default(), ProtocolVersion::V1_0_0, rng.gen(), AssociatedKeys::default(), ActionThresholds::default(), MessageTopics::default(), + EntityKind::default(), ); let balance: U512 = rng.gen(); diff --git a/types/src/block.rs b/types/src/block.rs deleted file mode 100644 index 51359ad5..00000000 --- a/types/src/block.rs +++ /dev/null @@ -1,654 +0,0 @@ -#[cfg(feature = "sse-data-testing")] -use casper_types::{bytesrepr, bytesrepr::ToBytes, crypto, testing::TestRng, SecretKey}; -use casper_types::{EraId, ProtocolVersion, PublicKey, Signature, Timestamp, U512}; -#[cfg(feature = "sse-data-testing")] -use rand::Rng; -use serde::{Deserialize, Serialize}; -#[cfg(feature = "sse-data-testing")] -use std::iter; -use std::{ - collections::BTreeMap, - fmt::{self, Display, Formatter}, - hash::Hash, -}; -use utoipa::ToSchema; - -use crate::{DeployHash, Digest}; - -/// A cryptographic hash identifying a [`Block`]. -#[derive( - Copy, - Clone, - Default, - Ord, - PartialOrd, - Eq, - PartialEq, - Hash, - Serialize, - Deserialize, - Debug, - ToSchema, -)] -#[serde(deny_unknown_fields)] -pub struct BlockHash(Digest); - -impl BlockHash { - /// Returns the wrapped inner hash. - pub fn inner(&self) -> &Digest { - &self.0 - } -} - -impl Display for BlockHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "block-hash({})", self.0,) - } -} - -#[cfg(feature = "sse-data-testing")] -impl BlockHash { - /// Creates a random block hash. - pub fn random(rng: &mut TestRng) -> Self { - let hash = Digest::from(rng.gen::<[u8; Digest::LENGTH]>()); - BlockHash(hash) - } -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for BlockHash { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -pub struct EraReport { - #[schema(value_type = Vec)] - equivocators: Vec, - #[schema(value_type = Map)] - rewards: BTreeMap, - #[schema(value_type = Vec)] - inactive_validators: Vec, -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for EraReport { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.equivocators.to_bytes()?); - buffer.extend(self.rewards.to_bytes()?); - buffer.extend(self.inactive_validators.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.equivocators.serialized_length() - + self.rewards.serialized_length() - + self.inactive_validators.serialized_length() - } -} - -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug)] -/// A struct to contain information related to the end of an era and validator weights for the -/// following era. -pub struct EraEnd { - /// The era end information. - era_report: EraReport, - /// The validator weights for the next era. 
- next_era_validator_weights: BTreeMap, -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for EraEnd { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.era_report.to_bytes()?); - buffer.extend(self.next_era_validator_weights.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.era_report.serialized_length() + self.next_era_validator_weights.serialized_length() - } -} - -/// The header portion of a [`Block`]. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -pub struct BlockHeader { - parent_hash: BlockHash, - state_root_hash: Digest, - body_hash: Digest, - random_bit: bool, - accumulated_seed: Digest, - era_end: Option, - #[schema(value_type = String)] - timestamp: Timestamp, - #[schema(value_type = u64)] - era_id: EraId, - height: u64, - /// The protocol version. - #[schema(value_type = String)] - protocol_version: ProtocolVersion, -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for BlockHeader { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.parent_hash.to_bytes()?); - buffer.extend(self.state_root_hash.to_bytes()?); - buffer.extend(self.body_hash.to_bytes()?); - buffer.extend(self.random_bit.to_bytes()?); - buffer.extend(self.accumulated_seed.to_bytes()?); - buffer.extend(self.era_end.to_bytes()?); - buffer.extend(self.timestamp.to_bytes()?); - buffer.extend(self.era_id.to_bytes()?); - buffer.extend(self.height.to_bytes()?); - buffer.extend(self.protocol_version.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.parent_hash.serialized_length() - + self.state_root_hash.serialized_length() - + self.body_hash.serialized_length() - + self.random_bit.serialized_length() - + self.accumulated_seed.serialized_length() - + self.era_end.serialized_length() - + self.timestamp.serialized_length() - + self.era_id.serialized_length() - + self.height.serialized_length() - + self.protocol_version.serialized_length() - } -} - -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -pub struct BlockBody { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - proposer: PublicKey, - deploy_hashes: Vec, - transfer_hashes: Vec, -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for BlockBody { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.proposer.to_bytes()?); - buffer.extend(self.deploy_hashes.to_bytes()?); - buffer.extend(self.transfer_hashes.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.proposer.serialized_length() - + self.deploy_hashes.serialized_length() - + self.transfer_hashes.serialized_length() - } -} - -#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct Block { - hash: BlockHash, - header: BlockHeader, - body: BlockBody, -} - -#[cfg(feature = "sse-data-testing")] -impl Block { - /// The hash of this block's header. - pub fn hash(&self) -> &BlockHash { - &self.hash - } - - pub fn random(rng: &mut TestRng) -> Self { - // Create the block body. 
- let proposer = PublicKey::random(rng); - let deploy_count = rng.gen_range(0..11); - let deploy_hashes = iter::repeat_with(|| DeployHash::new(Digest::random(rng))) - .take(deploy_count) - .collect(); - let transfer_count = rng.gen_range(0..11); - let transfer_hashes = iter::repeat_with(|| DeployHash::new(Digest::random(rng))) - .take(transfer_count) - .collect(); - let body = BlockBody { - proposer, - deploy_hashes, - transfer_hashes, - }; - // Create the block header. - let header = random_block_header(rng, &body); - - // Create the block hash. - let serialized_header = header - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block header: {}", error)); - let hash = BlockHash(Digest::hash(serialized_header)); - - Block { hash, header, body } - } -} - -#[cfg(feature = "sse-data-testing")] -fn random_block_header(rng: &mut TestRng, body: &BlockBody) -> BlockHeader { - let parent_hash = BlockHash(Digest::random(rng)); - let state_root_hash = Digest::random(rng); - let serialized_body = body - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize block body: {}", error)); - let body_hash = Digest::hash(serialized_body); - let random_bit = rng.gen(); - let accumulated_seed = Digest::random(rng); - let is_switch = rng.gen_bool(0.1); - let era_end = if is_switch { - Some(random_era_end(rng)) - } else { - None - }; - let timestamp = Timestamp::now(); - let era = rng.gen_range(1..6); - let height = era * 10 + rng.gen_range(0..10); - BlockHeader { - parent_hash, - state_root_hash, - body_hash, - random_bit, - accumulated_seed, - era_end, - timestamp, - era_id: EraId::new(era), - height, - protocol_version: ProtocolVersion::V1_0_0, - } -} - -#[cfg(feature = "sse-data-testing")] -fn random_era_end(rng: &mut TestRng) -> EraEnd { - const BLOCK_REWARD: u64 = 1_000_000_000_000; - let equivocators_count = rng.gen_range(0..5); - let rewards_count = rng.gen_range(0..5); - let inactive_count = rng.gen_range(0..5); - let era_report = EraReport { - equivocators: iter::repeat_with(|| PublicKey::random(rng)) - .take(equivocators_count) - .collect(), - rewards: iter::repeat_with(|| { - let public_key = PublicKey::random(rng); - let reward = rng.gen_range(1..(BLOCK_REWARD + 1)); - (public_key, reward) - }) - .take(rewards_count) - .collect(), - inactive_validators: iter::repeat_with(|| PublicKey::random(rng)) - .take(inactive_count) - .collect(), - }; - let validator_count = rng.gen_range(0..11); - let next_era_validator_weights = iter::repeat_with(|| (PublicKey::random(rng), rng.gen())) - .take(validator_count) - .collect(); - EraEnd { - era_report, - next_era_validator_weights, - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)] -pub struct FinalitySignature { - block_hash: BlockHash, - #[schema(value_type = u64)] - era_id: EraId, - #[schema(value_type = String)] - signature: Signature, - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - public_key: PublicKey, -} - -impl FinalitySignature { - /// Hash of a block this signature is for. - pub fn block_hash(&self) -> &BlockHash { - &self.block_hash - } - - /// Era in which the block was created in. - pub fn era_id(&self) -> EraId { - self.era_id - } - - /// Signature over the block hash. - pub fn signature(&self) -> &Signature { - &self.signature - } - - /// Public key of the signing validator. 
- pub fn public_key(&self) -> &PublicKey { - &self.public_key - } -} - -#[cfg(feature = "sse-data-testing")] -impl FinalitySignature { - pub fn random_for_block(block_hash: BlockHash, era_id: u64, rng: &mut TestRng) -> Self { - let mut bytes = block_hash.inner().into_vec(); - bytes.extend_from_slice(&era_id.to_le_bytes()); - let secret_key = SecretKey::random(rng); - let public_key = PublicKey::from(&secret_key); - let signature = crypto::sign(bytes, &secret_key, &public_key); - - FinalitySignature { - block_hash, - era_id: EraId::new(era_id), - signature, - public_key, - } - } -} - -pub mod json_compatibility { - use super::*; - use casper_types::PublicKey; - use utoipa::ToSchema; - - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct Reward { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - validator: PublicKey, - #[schema(value_type = String)] - amount: u64, - } - - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct ValidatorWeight { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - validator: PublicKey, - #[schema(value_type = String)] - weight: U512, - } - - /// Equivocation and reward information to be included in the terminal block. - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonEraReport { - #[schema(value_type = Vec)] - equivocators: Vec, - rewards: Vec, - #[schema(value_type = Vec)] - inactive_validators: Vec, - } - - impl From for JsonEraReport { - fn from(era_report: EraReport) -> Self { - JsonEraReport { - equivocators: era_report.equivocators, - rewards: era_report - .rewards - .into_iter() - .map(|(validator, amount)| Reward { validator, amount }) - .collect(), - inactive_validators: era_report.inactive_validators, - } - } - } - - impl From for EraReport { - fn from(era_report: JsonEraReport) -> Self { - let equivocators = era_report.equivocators; - let rewards = era_report - .rewards - .into_iter() - .map(|reward| (reward.validator, reward.amount)) - .collect(); - let inactive_validators = era_report.inactive_validators; - EraReport { - equivocators, - rewards, - inactive_validators, - } - } - } - - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonEraEnd { - era_report: JsonEraReport, - next_era_validator_weights: Vec, - } - - impl From for JsonEraEnd { - fn from(data: EraEnd) -> Self { - let json_era_end = JsonEraReport::from(data.era_report); - let json_validator_weights = data - .next_era_validator_weights - .iter() - .map(|(validator, weight)| ValidatorWeight { - validator: validator.clone(), - weight: *weight, - }) - .collect(); - JsonEraEnd { - era_report: json_era_end, - next_era_validator_weights: json_validator_weights, - } - } - } - - impl From for EraEnd { - fn from(json_data: JsonEraEnd) -> Self { - let era_report = EraReport::from(json_data.era_report); - let next_era_validator_weights = json_data - .next_era_validator_weights - .iter() - .map(|validator_weight| { - (validator_weight.validator.clone(), validator_weight.weight) - }) - .collect(); - EraEnd { - era_report, - next_era_validator_weights, - } - } - } - - /// JSON representation of a block header. 
- #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonBlockHeader { - /// The parent hash. - pub parent_hash: BlockHash, - /// The state root hash. - pub state_root_hash: Digest, - /// The body hash. - pub body_hash: Digest, - /// Randomness bit. - pub random_bit: bool, - /// Accumulated seed. - pub accumulated_seed: Digest, - /// The era end. - pub era_end: Option, - /// The block timestamp. - #[schema(value_type = String)] - pub timestamp: Timestamp, - /// The block era id. - #[schema(value_type = u64)] - pub era_id: EraId, - /// The block height. - pub height: u64, - /// The protocol version. - #[schema(value_type = String)] - pub protocol_version: ProtocolVersion, - } - - impl From for JsonBlockHeader { - fn from(block_header: BlockHeader) -> Self { - JsonBlockHeader { - parent_hash: block_header.parent_hash, - state_root_hash: block_header.state_root_hash, - body_hash: block_header.body_hash, - random_bit: block_header.random_bit, - accumulated_seed: block_header.accumulated_seed, - era_end: block_header.era_end.map(JsonEraEnd::from), - timestamp: block_header.timestamp, - era_id: block_header.era_id, - height: block_header.height, - protocol_version: block_header.protocol_version, - } - } - } - - impl From for BlockHeader { - fn from(block_header: JsonBlockHeader) -> Self { - BlockHeader { - parent_hash: block_header.parent_hash, - state_root_hash: block_header.state_root_hash, - body_hash: block_header.body_hash, - random_bit: block_header.random_bit, - accumulated_seed: block_header.accumulated_seed, - era_end: block_header.era_end.map(EraEnd::from), - timestamp: block_header.timestamp, - era_id: block_header.era_id, - height: block_header.height, - protocol_version: block_header.protocol_version, - } - } - } - - /// A JSON-friendly representation of `Body` - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonBlockBody { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - proposer: PublicKey, - deploy_hashes: Vec, - transfer_hashes: Vec, - } - - impl From for JsonBlockBody { - fn from(body: BlockBody) -> Self { - JsonBlockBody { - proposer: body.proposer.clone(), - deploy_hashes: body.deploy_hashes.clone(), - transfer_hashes: body.transfer_hashes, - } - } - } - - impl From for BlockBody { - fn from(json_body: JsonBlockBody) -> Self { - BlockBody { - proposer: json_body.proposer, - deploy_hashes: json_body.deploy_hashes, - transfer_hashes: json_body.transfer_hashes, - } - } - } - - /// A JSON-friendly representation of `Block`. - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonBlock { - /// `BlockHash` - pub hash: BlockHash, - /// JSON-friendly block header. - pub header: JsonBlockHeader, - /// JSON-friendly block body. - pub body: JsonBlockBody, - /// JSON-friendly list of proofs for this block. - pub proofs: Vec, - } - - impl JsonBlock { - /// Creates a new JSON block with no proofs from a linear chain block. - pub fn new_unsigned(block: Block) -> Self { - JsonBlock { - hash: block.hash, - header: JsonBlockHeader::from(block.header.clone()), - body: JsonBlockBody::from(block.body), - proofs: Vec::new(), - } - } - - /// Returns the hashes of the `Deploy`s included in the `Block`. 
- pub fn deploy_hashes(&self) -> &Vec { - &self.body.deploy_hashes - } - - /// Returns the hashes of the transfer `Deploy`s included in the `Block`. - pub fn transfer_hashes(&self) -> &Vec { - &self.body.transfer_hashes - } - - #[cfg(feature = "sse-data-testing")] - pub fn random(rng: &mut TestRng) -> Self { - let block = Block::random(rng); - let proofs_count = rng.gen_range(0..11); - let proofs = iter::repeat_with(|| { - let finality_signature = FinalitySignature::random_for_block( - block.hash, - block.header.era_id.value(), - rng, - ); - JsonProof { - public_key: finality_signature.public_key, - signature: finality_signature.signature, - } - }) - .take(proofs_count) - .collect(); - JsonBlock { - hash: block.hash, - header: JsonBlockHeader::from(block.header.clone()), - body: JsonBlockBody::from(block.body), - proofs, - } - } - } - - impl From for Block { - fn from(block: JsonBlock) -> Self { - Block { - hash: block.hash, - header: BlockHeader::from(block.header), - body: BlockBody::from(block.body), - } - } - } - - /// A JSON-friendly representation of a proof, i.e. a block's finality signature. - #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, ToSchema)] - #[serde(deny_unknown_fields)] - pub struct JsonProof { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - public_key: PublicKey, - #[schema(value_type = String)] - signature: Signature, - } - - impl From<(PublicKey, Signature)> for JsonProof { - fn from((public_key, signature): (PublicKey, Signature)) -> JsonProof { - JsonProof { - public_key, - signature, - } - } - } - - impl From for (PublicKey, Signature) { - fn from(proof: JsonProof) -> (PublicKey, Signature) { - (proof.public_key, proof.signature) - } - } -} diff --git a/types/src/deploy.rs b/types/src/deploy.rs deleted file mode 100644 index a5a39f7f..00000000 --- a/types/src/deploy.rs +++ /dev/null @@ -1,313 +0,0 @@ -#[cfg(feature = "sse-data-testing")] -use std::iter; -use std::{ - collections::BTreeSet, - fmt::{self, Display, Formatter}, -}; - -#[cfg(feature = "sse-data-testing")] -use rand::Rng; -use serde::{Deserialize, Serialize}; - -#[cfg(feature = "sse-data-testing")] -use casper_types::{ - bytesrepr::{self, ToBytes}, - testing::TestRng, -}; -use casper_types::{ - runtime_args, PublicKey, RuntimeArgs, SecretKey, Signature, TimeDiff, Timestamp, U512, -}; -use utoipa::ToSchema; - -use crate::{Digest, ExecutableDeployItem}; - -/// A cryptographic hash uniquely identifying a [`Deploy`]. -#[derive( - Copy, - Clone, - Default, - Ord, - PartialOrd, - Eq, - PartialEq, - Hash, - Serialize, - Deserialize, - Debug, - ToSchema, -)] -#[serde(deny_unknown_fields)] -pub struct DeployHash(Digest); - -impl DeployHash { - /// Returns a new `DeployHash`. - pub fn new(digest: Digest) -> Self { - DeployHash(digest) - } - - /// Returns a copy of the wrapped `Digest`. - pub fn inner(&self) -> &Digest { - &self.0 - } -} - -impl Display for DeployHash { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "{}", self.0) - } -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for DeployHash { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} - -/// The header portion of a [`Deploy`]. 
-#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -#[serde(deny_unknown_fields)] -pub struct DeployHeader { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - account: PublicKey, - #[schema(value_type = String)] - timestamp: Timestamp, - #[schema(value_type = String)] - ttl: TimeDiff, - gas_price: u64, - body_hash: Digest, - dependencies: Vec, - chain_name: String, -} - -impl DeployHeader { - /// Returns the account within which the deploy will be run. - pub fn account(&self) -> &PublicKey { - &self.account - } - - /// Returns the deploy creation timestamp. - pub fn timestamp(&self) -> Timestamp { - self.timestamp - } - - /// Returns the duration for which the deploy will stay valid. - pub fn ttl(&self) -> TimeDiff { - self.ttl - } - - /// Returns the price per gas unit for this deploy. - pub fn gas_price(&self) -> u64 { - self.gas_price - } - - /// Returns the hash of the body of this deploy. - pub fn body_hash(&self) -> &Digest { - &self.body_hash - } - - /// Other deploys that have to be run before this one. - pub fn dependencies(&self) -> &Vec { - &self.dependencies - } - - /// Returns the chain name of the network the deploy is supposed to be run on. - pub fn chain_name(&self) -> &str { - &self.chain_name - } -} - -impl Display for DeployHeader { - fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { - write!( - formatter, - "deploy header {{ account {}, timestamp {}, ttl {}, body hash {}, chain name {} }}", - self.account, self.timestamp, self.ttl, self.body_hash, self.chain_name, - ) - } -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for DeployHeader { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - buffer.extend(self.account.to_bytes()?); - buffer.extend(self.timestamp.to_bytes()?); - buffer.extend(self.ttl.to_bytes()?); - buffer.extend(self.gas_price.to_bytes()?); - buffer.extend(self.body_hash.to_bytes()?); - buffer.extend(self.dependencies.to_bytes()?); - buffer.extend(self.chain_name.to_bytes()?); - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - self.account.serialized_length() - + self.timestamp.serialized_length() - + self.ttl.serialized_length() - + self.gas_price.serialized_length() - + self.body_hash.serialized_length() - + self.dependencies.serialized_length() - + self.chain_name.serialized_length() - } -} - -/// The signature of a deploy and the public key of the signer. -#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -#[serde(deny_unknown_fields)] -pub struct Approval { - /// "Hex-encoded cryptographic public key, including the algorithm tag prefix." - #[schema(value_type = String)] - signer: PublicKey, - #[schema(value_type = String)] - signature: Signature, -} - -#[cfg(feature = "sse-data-testing")] -impl Approval { - pub fn create(hash: &DeployHash, secret_key: &SecretKey) -> Self { - let signer = PublicKey::from(secret_key); - let signature = casper_types::sign(hash.0, secret_key, &signer); - Self { signer, signature } - } -} - -/// A signed item sent to the network used to request execution of Wasm. 
-#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ToSchema)] -#[serde(deny_unknown_fields)] -pub struct Deploy { - hash: DeployHash, - header: DeployHeader, - payment: ExecutableDeployItem, - session: ExecutableDeployItem, - #[schema(value_type = Vec)] - approvals: BTreeSet, -} - -impl Deploy { - /// Returns the hash uniquely identifying this deploy. - pub fn hash(&self) -> &DeployHash { - &self.hash - } - - /// Returns the header portion of the deploy. - pub fn header(&self) -> &DeployHeader { - &self.header - } - - /// Returns the payment code of the deploy. - pub fn payment(&self) -> &ExecutableDeployItem { - &self.payment - } - - /// Returns the session code of the deploy. - pub fn session(&self) -> &ExecutableDeployItem { - &self.session - } - - /// Returns the `Approval`s of the deploy. - pub fn approvals(&self) -> &BTreeSet { - &self.approvals - } -} - -impl Display for Deploy { - fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { - write!( - formatter, - "deploy {{ {}, account {}, timestamp {}, ttl {}, body hash {}, chain name {} }}", - self.hash, - self.header.account, - self.header.timestamp, - self.header.ttl, - self.header.body_hash, - self.header.chain_name - ) - } -} - -#[cfg(feature = "sse-data-testing")] -impl Deploy { - pub fn random(rng: &mut TestRng) -> Self { - let timestamp = Timestamp::random(rng); - let ttl = TimeDiff::from_millis(rng.gen_range(60_000..3_600_000)); - Deploy::random_with_timestamp_and_ttl(rng, timestamp, ttl) - } - - /// Generates a random instance but using the specified `timestamp` and `ttl`. - pub fn random_with_timestamp_and_ttl( - rng: &mut TestRng, - timestamp: Timestamp, - ttl: TimeDiff, - ) -> Self { - // Create the deploy "body", i.e. the payment and session items. - // - // We need "amount" in order to be able to get correct info via `deploy_info()`. - let payment_args = runtime_args! { - "amount" => U512::from(10), - }; - let payment = ExecutableDeployItem::StoredContractByName { - name: String::from("casper-example"), - entry_point: String::from("example-entry-point"), - args: payment_args, - }; - let session = rng.gen(); - - // Create the deploy header. - let secret_key = SecretKey::random(rng); - let account = PublicKey::from(&secret_key); - let gas_price = rng.gen_range(1..100); - let body_hash = Digest::hash(serialize_body(&payment, &session)); - let dependencies_count = rng.gen_range(0..4); - let dependencies = iter::repeat_with(|| DeployHash::new(Digest::random(rng))) - .take(dependencies_count) - .collect(); - let chain_name = String::from("casper-example"); - let header = DeployHeader { - account, - timestamp, - ttl, - gas_price, - body_hash, - dependencies, - chain_name, - }; - - // Create the deploy hash and approval. 
- let hash = DeployHash::new(Digest::hash(serialize_header(&header))); - let approvals = iter::once(Approval::create(&hash, &secret_key)).collect(); - - Deploy { - hash, - header, - payment, - session, - approvals, - } - } -} - -#[cfg(feature = "sse-data-testing")] -fn serialize_header(header: &DeployHeader) -> Vec { - header - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize deploy header: {}", error)) -} - -#[cfg(feature = "sse-data-testing")] -fn serialize_body(payment: &ExecutableDeployItem, session: &ExecutableDeployItem) -> Vec { - let mut buffer = payment - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize payment code: {}", error)); - buffer.extend( - session - .to_bytes() - .unwrap_or_else(|error| panic!("should serialize session code: {}", error)), - ); - buffer -} diff --git a/types/src/digest.rs b/types/src/digest.rs deleted file mode 100644 index 7c14fdd6..00000000 --- a/types/src/digest.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::{ - array::TryFromSliceError, - fmt::{self, Debug, Display, Formatter}, -}; - -#[cfg(feature = "sse-data-testing")] -use blake2::{ - digest::{Update, VariableOutput}, - VarBlake2b, -}; -use hex_fmt::HexFmt; -#[cfg(feature = "sse-data-testing")] -use rand::Rng; -use serde::{de::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer}; - -use casper_types::checksummed_hex; -#[cfg(feature = "sse-data-testing")] -use casper_types::{ - bytesrepr::{self, ToBytes}, - testing::TestRng, -}; -use utoipa::ToSchema; - -/// The output of the hash function. -#[derive(Copy, Clone, Default, Ord, PartialOrd, Eq, PartialEq, Hash, ToSchema)] -pub struct Digest([u8; Digest::LENGTH]); - -impl Digest { - /// The number of bytes in a `Digest`. - pub const LENGTH: usize = 32; -} - -impl<'a> TryFrom<&'a [u8]> for Digest { - type Error = TryFromSliceError; - - fn try_from(slice: &[u8]) -> Result { - <[u8; Digest::LENGTH]>::try_from(slice).map(Digest) - } -} - -impl Serialize for Digest { - fn serialize(&self, serializer: S) -> Result { - if serializer.is_human_readable() { - HexFmt(&self.0).to_string().serialize(serializer) - } else { - // This is to keep backwards compatibility with how HexForm encodes byte arrays. - // HexForm treats this like a slice. 
- self.0[..].serialize(serializer) - } - } -} - -impl<'de> Deserialize<'de> for Digest { - fn deserialize>(deserializer: D) -> Result { - if deserializer.is_human_readable() { - let hex_string = String::deserialize(deserializer)?; - let bytes = - checksummed_hex::decode(hex_string.as_bytes()).map_err(SerdeError::custom)?; - let data = - <[u8; Digest::LENGTH]>::try_from(bytes.as_ref()).map_err(SerdeError::custom)?; - Ok(Digest(data)) - } else { - let data = >::deserialize(deserializer)?; - Digest::try_from(data.as_slice()).map_err(D::Error::custom) - } - } -} - -impl Debug for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{}", HexFmt(&self.0)) - } -} - -impl Display for Digest { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "{:10}", HexFmt(&self.0)) - } -} - -#[cfg(feature = "sse-data-testing")] -impl Digest { - pub fn hash>(data: T) -> Digest { - let mut ret = [0u8; Digest::LENGTH]; - // NOTE: Safe to unwrap here because our digest length is constant and valid - let mut hasher = VarBlake2b::new(Digest::LENGTH).unwrap(); - hasher.update(data); - hasher.finalize_variable(|hash| ret.clone_from_slice(hash)); - Digest(ret) - } - - pub fn random(rng: &mut TestRng) -> Digest { - Digest(rng.gen()) - } - - pub fn into_vec(self) -> Vec { - self.0.to_vec() - } -} - -impl AsRef<[u8]> for Digest { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -#[cfg(feature = "sse-data-testing")] -impl From<[u8; Digest::LENGTH]> for Digest { - fn from(arr: [u8; Digest::LENGTH]) -> Self { - Digest(arr) - } -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for Digest { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - self.0.to_bytes() - } - - fn serialized_length(&self) -> usize { - self.0.serialized_length() - } -} diff --git a/types/src/executable_deploy_item.rs b/types/src/executable_deploy_item.rs deleted file mode 100644 index 0fffb857..00000000 --- a/types/src/executable_deploy_item.rs +++ /dev/null @@ -1,331 +0,0 @@ -use hex_buffer_serde::{Hex, HexForm}; -#[cfg(feature = "sse-data-testing")] -use rand::{ - distributions::{Alphanumeric, Distribution, Standard}, - Rng, -}; -use serde::{Deserialize, Serialize}; - -use casper_types::{ - bytesrepr::Bytes, CLValue, ContractHash, ContractPackageHash, ContractVersion, RuntimeArgs, - U512, -}; -#[cfg(feature = "sse-data-testing")] -use casper_types::{ - bytesrepr::{self, ToBytes}, - system::auction::ARG_AMOUNT, -}; -use utoipa::ToSchema; - -#[cfg(feature = "sse-data-testing")] -macro_rules! bx { - ($e:expr) => { - Box::new($e) - }; -} - -#[cfg(feature = "sse-data-testing")] -const TAG_LENGTH: usize = 1; -#[cfg(feature = "sse-data-testing")] -const MODULE_BYTES_TAG: u8 = 0; -#[cfg(feature = "sse-data-testing")] -const STORED_CONTRACT_BY_HASH_TAG: u8 = 1; -#[cfg(feature = "sse-data-testing")] -const STORED_CONTRACT_BY_NAME_TAG: u8 = 2; -#[cfg(feature = "sse-data-testing")] -const STORED_VERSIONED_CONTRACT_BY_HASH_TAG: u8 = 3; -#[cfg(feature = "sse-data-testing")] -const STORED_VERSIONED_CONTRACT_BY_NAME_TAG: u8 = 4; -#[cfg(feature = "sse-data-testing")] -const TRANSFER_TAG: u8 = 5; -#[cfg(feature = "sse-data-testing")] -const MAX_PAYMENT_AMOUNT: u64 = 2_500_000_000; - -/// The payment or session code of a [`Deploy`]. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Debug, ToSchema)] -#[serde(deny_unknown_fields)] -pub enum ExecutableDeployItem { - /// Raw bytes of compiled Wasm code, which must include a `call` entry point, and the arguments - /// to call at runtime. 
- ModuleBytes { - /// The compiled Wasm bytes. - #[schema(value_type = String)] - module_bytes: Bytes, - /// The arguments to be passed to the entry point at runtime. - args: RuntimeArgs, - }, - /// A contract stored in global state, referenced by its "hash", along with the entry point and - /// arguments to call at runtime. - StoredContractByHash { - /// The contract's identifier. - #[serde(with = "HexForm")] - hash: ContractHash, - /// The contract's entry point to be called at runtime. - entry_point: String, - /// The arguments to be passed to the entry point at runtime. - args: RuntimeArgs, - }, - /// A contract stored in global state, referenced by a named key existing in the `Deploy`'s - /// account context, along with the entry point and arguments to call at runtime. - StoredContractByName { - /// The named of the named key under which the contract is referenced. - name: String, - /// The contract's entry point to be called at runtime. - entry_point: String, - /// The arguments to be passed to the entry point at runtime. - args: RuntimeArgs, - }, - /// A versioned contract stored in global state, referenced by its "hash", along with the entry - /// point and arguments to call at runtime. - StoredVersionedContractByHash { - /// The contract package's identifier. - #[serde(with = "HexForm")] - hash: ContractPackageHash, - /// The version of the contract to call. If `None`, the highest enabled version is used. - version: Option, - /// The contract's entry point to be called at runtime. - entry_point: String, - /// The arguments to be passed to the entry point at runtime. - args: RuntimeArgs, - }, - /// A versioned contract stored in global state, referenced by a named key existing in the - /// `Deploy`'s account context, along with the entry point and arguments to call at runtime. - StoredVersionedContractByName { - /// The named of the named key under which the contract package is referenced. - name: String, - /// The version of the contract to call. If `None`, the highest enabled version is used. - version: Option, - /// The contract's entry point to be called at runtime. - entry_point: String, - /// The arguments to be passed to the entry point at runtime. - args: RuntimeArgs, - }, - /// A native transfer which does not contain or reference any Wasm code. - Transfer { - /// The arguments to be passed to the native transfer entry point at runtime. 
- args: RuntimeArgs, - }, -} - -#[cfg(feature = "sse-data-testing")] -impl ExecutableDeployItem { - fn fields_serialized_length(&self) -> usize { - let components: Vec> = match self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } => { - vec![bx!(module_bytes), bx!(args)] - } - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } => vec![bx!(hash), bx!(entry_point), bx!(args)], - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } => vec![bx!(name), bx!(entry_point), bx!(args)], - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } => vec![bx!(hash), bx!(version), bx!(entry_point), bx!(args)], - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } => vec![bx!(name), bx!(version), bx!(entry_point), bx!(args)], - ExecutableDeployItem::Transfer { args } => vec![bx!(args)], - }; - components - .into_iter() - .map(|to_bytes| to_bytes.serialized_length()) - .sum() - } -} - -#[cfg(feature = "sse-data-testing")] -impl ToBytes for ExecutableDeployItem { - fn to_bytes(&self) -> Result, bytesrepr::Error> { - let mut buffer = bytesrepr::allocate_buffer(self)?; - match self { - ExecutableDeployItem::ModuleBytes { module_bytes, args } => { - write_module_bytes(&mut buffer, module_bytes, args)? - } - ExecutableDeployItem::StoredContractByHash { - hash, - entry_point, - args, - } => write_stored_contract(&mut buffer, hash, entry_point, args)?, - ExecutableDeployItem::StoredContractByName { - name, - entry_point, - args, - } => write_stored_contract_by_name(&mut buffer, name, entry_point, args)?, - ExecutableDeployItem::StoredVersionedContractByHash { - hash, - version, - entry_point, - args, - } => write_versioned_contract_by_hash(&mut buffer, hash, version, entry_point, args)?, - ExecutableDeployItem::StoredVersionedContractByName { - name, - version, - entry_point, - args, - } => write_versioned_contract_by_name(&mut buffer, name, version, entry_point, args)?, - ExecutableDeployItem::Transfer { args } => write_transfer(&mut buffer, args)?, - } - Ok(buffer) - } - - fn serialized_length(&self) -> usize { - TAG_LENGTH + self.fields_serialized_length() - } -} - -#[cfg(feature = "sse-data-testing")] -fn write_transfer(buffer: &mut Vec, args: &RuntimeArgs) -> Result<(), bytesrepr::Error> { - buffer.insert(0, TRANSFER_TAG); - buffer.extend(args.to_bytes()?); - Ok(()) -} - -#[cfg(feature = "sse-data-testing")] -fn write_versioned_contract_by_name( - buffer: &mut Vec, - name: &String, - version: &Option, - entry_point: &String, - args: &RuntimeArgs, -) -> Result<(), bytesrepr::Error> { - buffer.insert(0, STORED_VERSIONED_CONTRACT_BY_NAME_TAG); - buffer.extend(name.to_bytes()?); - buffer.extend(version.to_bytes()?); - buffer.extend(entry_point.to_bytes()?); - buffer.extend(args.to_bytes()?); - Ok(()) -} - -#[cfg(feature = "sse-data-testing")] -fn write_versioned_contract_by_hash( - buffer: &mut Vec, - hash: &ContractPackageHash, - version: &Option, - entry_point: &String, - args: &RuntimeArgs, -) -> Result<(), bytesrepr::Error> { - buffer.insert(0, STORED_VERSIONED_CONTRACT_BY_HASH_TAG); - buffer.extend(hash.to_bytes()?); - buffer.extend(version.to_bytes()?); - buffer.extend(entry_point.to_bytes()?); - buffer.extend(args.to_bytes()?); - Ok(()) -} - -#[cfg(feature = "sse-data-testing")] -fn write_stored_contract_by_name( - buffer: &mut Vec, - name: &String, - entry_point: &String, - args: &RuntimeArgs, -) -> Result<(), bytesrepr::Error> { - 
-    buffer.insert(0, STORED_CONTRACT_BY_NAME_TAG);
-    buffer.extend(name.to_bytes()?);
-    buffer.extend(entry_point.to_bytes()?);
-    buffer.extend(args.to_bytes()?);
-    Ok(())
-}
-
-#[cfg(feature = "sse-data-testing")]
-fn write_stored_contract(
-    buffer: &mut Vec<u8>,
-    hash: &ContractHash,
-    entry_point: &String,
-    args: &RuntimeArgs,
-) -> Result<(), bytesrepr::Error> {
-    buffer.insert(0, STORED_CONTRACT_BY_HASH_TAG);
-    buffer.extend(hash.to_bytes()?);
-    buffer.extend(entry_point.to_bytes()?);
-    buffer.extend(args.to_bytes()?);
-    Ok(())
-}
-
-#[cfg(feature = "sse-data-testing")]
-fn write_module_bytes(
-    buffer: &mut Vec<u8>,
-    module_bytes: &Bytes,
-    args: &RuntimeArgs,
-) -> Result<(), bytesrepr::Error> {
-    buffer.insert(0, MODULE_BYTES_TAG);
-    buffer.extend(module_bytes.to_bytes()?);
-    buffer.extend(args.to_bytes()?);
-    Ok(())
-}
-
-#[cfg(feature = "sse-data-testing")]
-impl Distribution<ExecutableDeployItem> for Standard {
-    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> ExecutableDeployItem {
-        let mut args = RuntimeArgs::new();
-        let _ = args.insert(random_string(rng), Bytes::from(random_bytes(rng)));
-        match rng.gen_range(0..5) {
-            0 => ExecutableDeployItem::ModuleBytes {
-                module_bytes: random_bytes(rng).into(),
-                args,
-            },
-            1 => ExecutableDeployItem::StoredContractByHash {
-                hash: ContractHash::new(rng.gen()),
-                entry_point: random_string(rng),
-                args,
-            },
-            2 => ExecutableDeployItem::StoredContractByName {
-                name: random_string(rng),
-                entry_point: random_string(rng),
-                args,
-            },
-            3 => ExecutableDeployItem::StoredVersionedContractByHash {
-                hash: ContractPackageHash::new(rng.gen()),
-                version: rng.gen(),
-                entry_point: random_string(rng),
-                args,
-            },
-            4 => ExecutableDeployItem::StoredVersionedContractByName {
-                name: random_string(rng),
-                version: rng.gen(),
-                entry_point: random_string(rng),
-                args,
-            },
-            5 => random_transfer(rng),
-            _ => unreachable!(),
-        }
-    }
-}
-
-#[cfg(feature = "sse-data-testing")]
-fn random_string<R: Rng>(rng: &mut R) -> String {
-    rng.sample_iter(&Alphanumeric)
-        .take(20)
-        .map(char::from)
-        .collect()
-}
-
-#[cfg(feature = "sse-data-testing")]
-fn random_bytes<R: Rng>(rng: &mut R) -> Vec<u8> {
-    let mut bytes = vec![0u8; rng.gen_range(0..100)];
-    rng.fill_bytes(bytes.as_mut());
-    bytes
-}
-
-#[cfg(feature = "sse-data-testing")]
-fn random_transfer<R: Rng>(rng: &mut R) -> ExecutableDeployItem {
-    let amount = rng.gen_range(MAX_PAYMENT_AMOUNT..1_000_000_000_000_000);
-    let mut transfer_args = RuntimeArgs::new();
-    transfer_args.insert_cl_value(
-        ARG_AMOUNT,
-        CLValue::from_t(U512::from(amount)).expect("should get CLValue from U512"),
-    );
-    ExecutableDeployItem::Transfer {
-        args: transfer_args,
-    }
-}
diff --git a/types/src/filter.rs b/types/src/filter.rs
index 58779b7c..93776fc2 100644
--- a/types/src/filter.rs
+++ b/types/src/filter.rs
@@ -4,18 +4,12 @@ use std::fmt::{Display, Formatter};
 #[derive(Hash, Eq, PartialEq, Debug, Clone)]
 pub enum Filter {
     Events,
-    Main,
-    Deploys,
-    Sigs,
 }
 
 impl Display for Filter {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
             Filter::Events => write!(f, "events"),
-            Filter::Main => write!(f, "events/main"),
-            Filter::Deploys => write!(f, "events/deploys"),
-            Filter::Sigs => write!(f, "events/sigs"),
         }
     }
 }
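Collapsing `Filter` to the single `Events` variant means every subscriber now uses the one firehose endpoint and relies on `EventFilter` for narrowing. A rough sketch of how a client URL might be assembled from the remaining variant; the crate name, address, and port below are placeholders rather than anything introduced by this change:

```rust
use casper_event_types::Filter; // assumed package name for this `types` crate

/// Builds the SSE URL for the only remaining stream.
fn sse_url(base: &str) -> String {
    // `Filter::Events` renders as "events" through the `Display` impl above,
    // so the single stream URL looks like "<base>/events".
    format!("{}/{}", base.trim_end_matches('/'), Filter::Events)
}

fn main() {
    // Placeholder address; the real bind address comes from the sidecar config.
    println!("{}", sse_url("http://127.0.0.1:19999"));
}
```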
diff --git a/types/src/lib.rs b/types/src/lib.rs
index 0b82ee11..0129df0d 100644
--- a/types/src/lib.rs
+++ b/types/src/lib.rs
@@ -4,18 +4,10 @@
 #[cfg_attr(not(test), macro_use)]
 extern crate alloc;
 
-pub mod block;
-pub mod deploy;
-mod digest;
-mod executable_deploy_item;
 mod filter;
 pub mod metrics;
 pub mod sse_data;
 #[cfg(feature = "sse-data-testing")]
 mod testing;
 
-pub use crate::executable_deploy_item::ExecutableDeployItem;
-pub use block::{json_compatibility::JsonBlock, Block, BlockHash, FinalitySignature};
-pub use deploy::{Deploy, DeployHash};
-pub use digest::Digest;
 pub use filter::Filter;
diff --git a/types/src/sse_data.rs b/types/src/sse_data.rs
index a89303cc..56c9f653 100644
--- a/types/src/sse_data.rs
+++ b/types/src/sse_data.rs
@@ -7,9 +7,9 @@ pub enum EventFilter {
     ApiVersion,
     SidecarVersion,
     BlockAdded,
-    DeployAccepted,
-    DeployProcessed,
-    DeployExpired,
+    TransactionAccepted,
+    TransactionProcessed,
+    TransactionExpired,
     Fault,
     FinalitySignature,
     Step,
@@ -17,10 +17,11 @@
 #[cfg(feature = "sse-data-testing")]
 use super::testing;
-use crate::{BlockHash, Deploy, DeployHash, FinalitySignature, JsonBlock};
+use casper_types::{
+    contract_messages::Messages, execution::ExecutionResult, Block, BlockHash, ChainNameDigest, EraId, FinalitySignature, InitiatorAddr, ProtocolVersion, PublicKey, TestBlockBuilder, TimeDiff, Timestamp, Transaction, TransactionHash
+};
 #[cfg(feature = "sse-data-testing")]
-use casper_types::testing::TestRng;
-use casper_types::{EraId, ExecutionResult, ProtocolVersion, PublicKey, TimeDiff, Timestamp};
+use casper_types::{execution::ExecutionResultV2, testing::TestRng};
 #[cfg(feature = "sse-data-testing")]
 use rand::Rng;
 use serde::{Deserialize, Serialize};
@@ -65,26 +66,23 @@ pub enum SseData {
     /// The given block has been added to the linear chain and stored locally.
     BlockAdded {
         block_hash: BlockHash,
-        block: Box<JsonBlock>,
+        block: Box<Block>,
     },
-    /// The given deploy has been newly-accepted by this node.
-    DeployAccepted {
-        #[serde(flatten)]
-        // It's an Arc to not create multiple copies of the same deploy for multiple subscribers.
-        deploy: Arc<Deploy>,
-    },
-    /// The given deploy has been executed, committed and forms part of the given block.
-    DeployProcessed {
-        deploy_hash: Box<DeployHash>,
-        account: Box<PublicKey>,
+    /// The given transaction has been newly-accepted by this node.
+    TransactionAccepted(Arc<Transaction>),
+    /// The given transaction has been executed, committed and forms part of the given block.
+    TransactionProcessed {
+        transaction_hash: Box<TransactionHash>,
+        initiator_addr: Box<InitiatorAddr>,
         timestamp: Timestamp,
         ttl: TimeDiff,
-        dependencies: Vec<DeployHash>,
         block_hash: Box<BlockHash>,
+        //#[data_size(skip)]
         execution_result: Box<ExecutionResult>,
+        messages: Messages,
     },
-    /// The given deploy has expired.
-    DeployExpired { deploy_hash: DeployHash },
+    /// The given transaction has expired.
+    TransactionExpired { transaction_hash: TransactionHash },
     /// Generic representation of validator's fault in an era.
     Fault {
         era_id: EraId,
@@ -96,7 +94,7 @@
     /// The execution effects produced by a `StepRequest`.
     Step {
         era_id: EraId,
-        execution_effect: Box<RawValue>,
+        execution_effects: Box<RawValue>,
     },
     /// The node is about to shut down.
     Shutdown,
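For code consuming the stream, the enum change mostly means matching on the transaction-centric variants instead of the deploy ones. A minimal consumer-side sketch follows; the import path and the logging are illustrative, `Transaction::hash()` is used the same way the test helpers below use it, and `TransactionHash`/`BlockHash` are assumed to keep their usual `Display` impls:

```rust
use casper_event_types::sse_data::SseData; // assumed path to the enum defined above

fn log_event(event: &SseData) {
    match event {
        // A single variant now carries both legacy deploys and V1 transactions.
        SseData::TransactionAccepted(transaction) => {
            println!("accepted {}", transaction.hash());
        }
        SseData::TransactionProcessed {
            transaction_hash,
            block_hash,
            ..
        } => {
            println!("processed {} in block {}", transaction_hash, block_hash);
        }
        SseData::TransactionExpired { transaction_hash } => {
            println!("expired {}", transaction_hash);
        }
        // Blocks, faults, finality signatures, steps, etc. are unchanged in shape.
        _ => {}
    }
}
```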
@@ -106,12 +104,17 @@ impl SseData {
     pub fn should_include(&self, filter: &[EventFilter]) -> bool {
         match self {
             SseData::Shutdown => true,
-            SseData::ApiVersion(_) => filter.contains(&EventFilter::ApiVersion),
+            //Keeping the rest part as explicit match so that if a new variant is added, it will be caught by the compiler
             SseData::SidecarVersion(_) => filter.contains(&EventFilter::SidecarVersion),
+            SseData::ApiVersion(_) => filter.contains(&EventFilter::ApiVersion),
             SseData::BlockAdded { .. } => filter.contains(&EventFilter::BlockAdded),
-            SseData::DeployAccepted { .. } => filter.contains(&EventFilter::DeployAccepted),
-            SseData::DeployProcessed { .. } => filter.contains(&EventFilter::DeployProcessed),
-            SseData::DeployExpired { .. } => filter.contains(&EventFilter::DeployExpired),
+            SseData::TransactionAccepted { .. } => {
+                filter.contains(&EventFilter::TransactionAccepted)
+            }
+            SseData::TransactionProcessed { .. } => {
+                filter.contains(&EventFilter::TransactionProcessed)
+            }
+            SseData::TransactionExpired { .. } => filter.contains(&EventFilter::TransactionExpired),
             SseData::Fault { .. } => filter.contains(&EventFilter::Fault),
             SseData::FinalitySignature(_) => filter.contains(&EventFilter::FinalitySignature),
             SseData::Step { .. } => filter.contains(&EventFilter::Step),
@@ -133,41 +136,49 @@ impl SseData {
     /// Returns a random `SseData::BlockAdded`.
     pub fn random_block_added(rng: &mut TestRng) -> Self {
-        let block = JsonBlock::random(rng);
+        let block = TestBlockBuilder::new().build(rng);
         SseData::BlockAdded {
-            block_hash: block.hash,
-            block: Box::new(block),
+            block_hash: *block.hash(),
+            block: Box::new(block.into()),
         }
     }
 
     /// Returns a random `SseData::DeployAccepted`, along with the random `Deploy`.
-    pub fn random_deploy_accepted(rng: &mut TestRng) -> (Self, Deploy) {
-        let deploy = Deploy::random(rng);
-        let event = SseData::DeployAccepted {
-            deploy: Arc::new(deploy.clone()),
-        };
-        (event, deploy)
+    pub fn random_transaction_accepted(rng: &mut TestRng) -> (Self, Transaction) {
+        let transaction = Transaction::random(rng);
+        let event = SseData::TransactionAccepted(Arc::new(transaction.clone()));
+        (event, transaction)
     }
 
     /// Returns a random `SseData::DeployProcessed`.
-    pub fn random_deploy_processed(rng: &mut TestRng) -> Self {
-        let deploy = Deploy::random(rng);
-        SseData::DeployProcessed {
-            deploy_hash: Box::new(*deploy.hash()),
-            account: Box::new(deploy.header().account().clone()),
-            timestamp: deploy.header().timestamp(),
-            ttl: deploy.header().ttl(),
-            dependencies: deploy.header().dependencies().clone(),
+    pub fn random_transaction_processed(rng: &mut TestRng) -> Self {
+        let transaction = Transaction::random(rng);
+        let timestamp = match &transaction {
+            Transaction::Deploy(deploy) => deploy.header().timestamp(),
+            Transaction::V1(v1_transaction) => v1_transaction.timestamp(),
+        };
+        let ttl = match &transaction {
+            Transaction::Deploy(deploy) => deploy.header().ttl(),
+            Transaction::V1(v1_transaction) => v1_transaction.ttl(),
+        };
+
+        SseData::TransactionProcessed {
+            transaction_hash: Box::new(TransactionHash::random(rng)),
+            initiator_addr: Box::new(transaction.initiator_addr().clone()),
+            timestamp,
+            ttl,
             block_hash: Box::new(BlockHash::random(rng)),
-            execution_result: Box::new(rng.gen()),
+            //#[data_size(skip)]
+            execution_result: Box::new(ExecutionResult::random(rng)),
+            messages: rng.random_vec(1..5),
         }
     }
 
     /// Returns a random `SseData::DeployExpired`
-    pub fn random_deploy_expired(rng: &mut TestRng) -> Self {
-        let deploy = testing::create_expired_deploy(Timestamp::now(), rng);
-        SseData::DeployExpired {
-            deploy_hash: *deploy.hash(),
+    pub fn random_transaction_expired(rng: &mut TestRng) -> Self {
+        let transaction = testing::create_expired_transaction(Timestamp::now(), rng);
+        SseData::TransactionExpired {
+            transaction_hash: transaction.hash(),
         }
     }
 
@@ -182,25 +193,40 @@
     /// Returns a random `SseData::FinalitySignature`.
pub fn random_finality_signature(rng: &mut TestRng) -> Self { + let block_hash = BlockHash::random(rng); + let block_height = rng.gen::(); + let era_id = EraId::random(rng); + let chain_name_digest = ChainNameDigest::random(rng); SseData::FinalitySignature(Box::new(FinalitySignature::random_for_block( - BlockHash::random(rng), - rng.gen(), + block_hash, + block_height, + era_id, + chain_name_digest, rng, ))) } /// Returns a random `SseData::Step`. pub fn random_step(rng: &mut TestRng) -> Self { - let execution_effect = match rng.gen::() { - ExecutionResult::Success { effect, .. } | ExecutionResult::Failure { effect, .. } => { - effect - } + let execution_effects = match ExecutionResultV2::random(rng) { + ExecutionResultV2::Success { effects, .. } + | ExecutionResultV2::Failure { effects, .. } => effects, }; SseData::Step { era_id: EraId::new(rng.gen()), - execution_effect: to_raw_value(&execution_effect).unwrap(), + execution_effects: to_raw_value(&execution_effects).unwrap(), } } + + /// Returns a random `SseData::SidecarVersion`. + pub fn random_sidecar_version(rng: &mut TestRng) -> Self { + let protocol_version = ProtocolVersion::from_parts( + rng.gen_range(2..10), + rng.gen::() as u32, + rng.gen::() as u32, + ); + SseData::SidecarVersion(protocol_version) + } } #[cfg(feature = "sse-data-testing")] @@ -215,21 +241,21 @@ pub mod test_support { "000625a798318315a4f401828f6d53371a623d79653db03a79a4cfbdd1e4ae53"; pub fn example_api_version() -> String { - "{\"ApiVersion\":\"1.5.2\"}".to_string() + "{\"ApiVersion\":\"2.0.0\"}".to_string() } pub fn shutdown() -> String { "\"Shutdown\"".to_string() } - pub fn example_block_added_1_5_2(block_hash: &str, height: &str) -> String { - let raw_block_added = format!("{{\"BlockAdded\":{{\"block_hash\":\"{block_hash}\",\"block\":{{\"hash\":\"{block_hash}\",\"header\":{{\"parent_hash\":\"4a28718301a83a43563ec42a184294725b8dd188aad7a9fceb8a2fa1400c680e\",\"state_root_hash\":\"63274671f2a860e39bb029d289e688526e4828b70c79c678649748e5e376cb07\",\"body_hash\":\"6da90c09f3fc4559d27b9fff59ab2453be5752260b07aec65e0e3a61734f656a\",\"random_bit\":true,\"accumulated_seed\":\"c8b4f30a3e3e082f4f206f972e423ffb23d152ca34241ff94ba76189716b61da\",\"era_end\":{{\"era_report\":{{\"equivocators\":[],\"rewards\":[{{\"validator\":\"01026ca707c348ed8012ac6a1f28db031fadd6eb67203501a353b867a08c8b9a80\",\"amount\":1559401400039}},{{\"validator\":\"010427c1d1227c9d2aafe8c06c6e6b276da8dcd8fd170ca848b8e3e8e1038a6dc8\",\"amount\":25895190891}}],\"inactive_validators\":[]}},\"next_era_validator_weights\":[{{\"validator\":\"01026ca707c348ed8012ac6a1f28db031fadd6eb67203501a353b867a08c8b9a80\",\"weight\":\"50538244651768072\"}},{{\"validator\":\"010427c1d1227c9d2aafe8c06c6e6b276da8dcd8fd170ca848b8e3e8e1038a6dc8\",\"weight\":\"839230678448335\"}}]}},\"timestamp\":\"2021-04-08T05:14:14.912Z\",\"era_id\":90,\"height\":{height},\"protocol_version\":\"1.0.0\"}},\"body\":{{\"proposer\":\"012bac1d0ff9240ff0b7b06d555815640497861619ca12583ddef434885416e69b\",\"deploy_hashes\":[],\"transfer_hashes\":[]}},\"proofs\":[]}}}}}}"); + pub fn example_block_added_2_0_0(hash: &str, height: &str) -> String { + let raw_block_added = 
format!("{{\"BlockAdded\":{{\"block_hash\":\"{hash}\",\"block\":{{\"Version2\":{{\"hash\":\"{hash}\",\"header\":{{\"parent_hash\":\"e38f28265439296d106cf111869cd17a3ca114707ae2c82b305bf830f90a36a5\",\"state_root_hash\":\"e7ec15c0700717850febb2a0a67ee5d3a55ddb121b1fc70e5bcf154e327fe6c6\",\"body_hash\":\"5ad04cda6912de119d776045d44a4266e05eb768d4c1652825cc19bce7030d2c\",\"random_bit\":false,\"accumulated_seed\":\"bbcabbb76ac8714a37e928b7f0bde4caeddf5e446e51a36ceab9a34f5e983b92\",\"era_end\":null,\"timestamp\":\"2024-02-22T08:18:44.352Z\",\"era_id\":2,\"height\":{height},\"protocol_version\":\"1.5.3\"}},\"body\":{{\"proposer\":\"01302f30e5a5a00b2a0afbfbe9e63b3a9feb278d5f1944ba5efffa15fbb2e8a2e6\",\"transfer\":[],\"staking\":[],\"install_upgrade\":[],\"standard\":[{{\"Deploy\":\"2e3083dbf5344c82efeac5e1a079bfd94acc1dfb454da0d92970f2e18e3afa9f\"}}],\"rewarded_signatures\":[[248],[0],[0]]}}}}}}}}}}"); super::deserialize(&raw_block_added).unwrap(); // deserializing to make sure that the raw json string is in correct form raw_block_added } - pub fn example_finality_signature_1_5_2(block_hash: &str) -> String { - let raw_block_added = format!("{{\"FinalitySignature\":{{\"block_hash\":\"{block_hash}\",\"era_id\":8538,\"signature\":\"0157368db32b578c1cf97256c3012d50afc5745fe22df2f4be1efd0bdf82b63ce072b4726fdfb7c026068b38aaa67ea401b49d969ab61ae587af42c64de8914101\",\"public_key\":\"0138e64f04c03346e94471e340ca7b94ba3581e5697f4d1e59f5a31c0da720de45\"}}}}"); + pub fn example_finality_signature_2_0_0(hash: &str) -> String { + let raw_block_added = format!("{{\"FinalitySignature\":{{\"block_hash\":\"{hash}\",\"era_id\":2,\"signature\":\"01ff6089c9b187f38ba61b518082db22552fb4762d505773e8221f6593c45e0602de560c4690b035dbacba9ab9dbe63e97d928970a515ea6a25fb920b3e9099d05\",\"public_key\":\"01914182c7d11ef13dccdbf1470648af3c3cd7f570bc351f0c14112370b19b8331\"}}}}"); super::deserialize(&raw_block_added).unwrap(); // deserializing to make sure that the raw json string is in correct form raw_block_added } diff --git a/types/src/testing.rs b/types/src/testing.rs index 95c8bea7..c9496fb4 100644 --- a/types/src/testing.rs +++ b/types/src/testing.rs @@ -3,23 +3,38 @@ //! Contains various parts and components to aid writing tests and simulations using the //! `casper-node` library. -use casper_types::{testing::TestRng, TimeDiff, Timestamp}; - -use crate::Deploy; +use casper_types::{ + testing::TestRng, Deploy, TimeDiff, Timestamp, Transaction, TransactionV1Builder, +}; +use rand::Rng; /// Creates a test deploy created at given instant and with given ttl. -pub fn create_test_deploy( +pub fn create_test_transaction( created_ago: TimeDiff, ttl: TimeDiff, now: Timestamp, test_rng: &mut TestRng, -) -> Deploy { - Deploy::random_with_timestamp_and_ttl(test_rng, now - created_ago, ttl) +) -> Transaction { + if test_rng.gen() { + Transaction::Deploy(Deploy::random_with_timestamp_and_ttl( + test_rng, + now - created_ago, + ttl, + )) + } else { + let timestamp = now - created_ago; + let transaction = TransactionV1Builder::new_random(test_rng) + .with_timestamp(timestamp) + .with_ttl(ttl) + .build() + .unwrap(); + Transaction::V1(transaction) + } } /// Creates a random deploy that is considered expired. -pub fn create_expired_deploy(now: Timestamp, test_rng: &mut TestRng) -> Deploy { - create_test_deploy( +pub fn create_expired_transaction(now: Timestamp, test_rng: &mut TestRng) -> Transaction { + create_test_transaction( TimeDiff::from_seconds(20), TimeDiff::from_seconds(10), now,