diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 723704ffba1..1f7d54d15a2 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -53,3 +53,15 @@ jobs: shell: 'script -q -e -c "bash {0}"' # required to workaround /dev/tty not being available run: | ./.github/scripts/compare_iai.sh + + # Checks that benchmarks not run in CI compile + bench-check: + name: check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Install toolchain + uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - name: Check if benchmarks build + run: cargo bench --all --all-features --all-targets --no-run diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml deleted file mode 100644 index 4315ebf1aaf..00000000000 --- a/.github/workflows/project.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: project - -on: - issues: - types: - - opened - -jobs: - add: - name: add issue/pr - runs-on: ubuntu-latest - steps: - - uses: actions/add-to-project@v0.4.0 - with: - project-url: https://github.com/orgs/paradigmxyz/projects/1 - github-token: ${{ secrets.GH_PROJECT_TOKEN }} diff --git a/.github/workflows/sanity.yml b/.github/workflows/sanity.yml index 59de0b5b9bd..1b33133fde8 100644 --- a/.github/workflows/sanity.yml +++ b/.github/workflows/sanity.yml @@ -25,7 +25,6 @@ jobs: - name: Install toolchain uses: dtolnay/rust-toolchain@nightly with: - toolchain: nightly-2022-12-27 components: llvm-tools-preview - uses: Swatinem/rust-cache@v2 with: diff --git a/Cargo.lock b/Cargo.lock index 114e99007c0..4ad04cc2b35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,16 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + [[package]] name = "addr2line" version = "0.19.0" @@ -128,21 +138,21 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" [[package]] name = "aquamarine" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7cba9b073f771a6f76683be98624dd68e867ff9e6adcad3afbb3d2044c3afa" +checksum = "759d98a5db12e9c9d98ef2b92f794ae5c7ded6ec18d21c3fa485c9c65bec237d" dependencies = [ - "itertools 0.9.0", + "itertools", "proc-macro-error", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -182,44 +192,22 @@ dependencies = [ [[package]] name = "async-lock" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" +checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" dependencies = [ "event-listener", - "futures-lite", -] - -[[package]] -name = "async-stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" -dependencies = [ - "async-stream-impl", - "futures-core", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" -dependencies = [ - "proc-macro2 1.0.51", - "quote 1.0.23", - "syn 1.0.107", ] [[package]] name = "async-trait" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" +checksum = "095183a3539c7c7649b2beb87c2d3f0591f3a7fed07761cc546d244e27e0238c" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -265,18 +253,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "auto_impl" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4" -dependencies = [ - "proc-macro-error", - "proc-macro2 1.0.51", - "quote 1.0.23", - "syn 1.0.107", -] - [[package]] name = "auto_impl" version = "1.0.1" @@ -284,9 +260,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a8c1df849285fbacd587de7818cc7d13be6cd2cbcd47a04fb1801b0e2706e33" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -328,6 +304,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base58" version = "0.1.0" @@ -364,9 +346,9 @@ checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" [[package]] name = "base64ct" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-toml" @@ -413,12 +395,12 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "regex", "rustc-hash", "shlex", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -460,6 +442,7 @@ checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ "funty", "radium 0.7.0", + "serde", "tap", "wyz", ] @@ -550,9 +533,9 @@ checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" [[package]] name = "bstr" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45ea9b00a7b3f2988e9a65ad3917e62123c38dba709b666506207be96d1790b" +checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1" dependencies = [ "memchr", "serde", @@ -578,9 +561,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytemuck" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c041d3eab048880cb0b86b256447da3f18859a163c3b8d8893f4e6368abe6393" +checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" [[package]] name = "byteorder" @@ -599,9 +582,9 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" +checksum = "6031a462f977dd38968b6f23378356512feeace69cef817e1a4475108093cec3" dependencies = [ "serde", ] @@ -617,13 +600,13 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a" +checksum = "08a1ec454bc3eead8719cb56e15dbbfecdbc14e4b3a3ae4936cc6e31f5fc0d07" dependencies = [ "camino", "cargo-platform", - "semver 1.0.16", + "semver 1.0.17", "serde", "serde_json", "thiserror", @@ -734,9 +717,9 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.4.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" +checksum = "77ed9a53e5d4d9c573ae844bfac6872b159cb1d1585a83b29e7a64b7eef7332a" dependencies = [ "glob", "libc", @@ -757,13 +740,13 @@ dependencies = [ [[package]] name = "clap" -version = "4.1.4" +version = "4.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13b9c79b5d1dd500d20ef541215a6423c75829ef43117e1b4d17fd8af0b5d76" +checksum = "c3d7ae14b20b94cb02149ed21a86c423859cbe18dc7ed69845cace50e52b40a5" dependencies = [ "bitflags", "clap_derive", - "clap_lex 0.3.1", + "clap_lex 0.3.2", "is-terminal", "once_cell", "strsim 0.10.0", @@ -772,15 +755,15 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.1.0" +version = "4.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "684a277d672e91966334af371f1a7b5833f9aa00b07c84e92fbce95e00208ce8" +checksum = "44bec8e5c9d09e439c4335b1af0abaab56dcf3b94999a936e1bb47b9134288f0" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -794,9 +777,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "783fe232adfca04f90f56201b26d79682d4cd2625e0bc7290b95123afe558ade" +checksum = "350b9cf31731f9957399229e9b2adc51eeabdfbe9d71d9a0552275fd12710d09" dependencies = [ "os_str_bytes", ] @@ -813,10 +796,10 @@ version = "0.1.0" dependencies = [ 
"convert_case 0.6.0", "parity-scale-codec", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "serde", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -831,9 +814,9 @@ dependencies = [ [[package]] name = "coins-bip32" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" +checksum = "8a3b95d26eace980ade01e0ea8d996f7d5ae7031f9c5f258317ec82e400f33c1" dependencies = [ "bincode", "bs58", @@ -841,7 +824,7 @@ dependencies = [ "digest 0.10.6", "getrandom 0.2.8", "hmac", - "k256", + "k256 0.11.6", "lazy_static", "serde", "sha2 0.10.6", @@ -850,26 +833,28 @@ dependencies = [ [[package]] name = "coins-bip39" -version = "0.7.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" +checksum = "a05ceda6ab4876de899fe23e8a171b200be9a346289bce12d41311e4bce2f104" dependencies = [ "bitvec 0.17.4", "coins-bip32", "getrandom 0.2.8", "hex", "hmac", + "once_cell", "pbkdf2", "rand 0.8.5", "sha2 0.10.6", "thiserror", + "tracing", ] [[package]] name = "coins-core" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" +checksum = "87637d08a1dd6bfa3bcd697f0a4de6c1b3a03e085b9d841a7e9cde4ccb61514b" dependencies = [ "base58check", "base64 0.12.3", @@ -912,9 +897,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" +checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" [[package]] name = "convert_case" @@ -1002,7 +987,7 @@ dependencies = [ "clap 3.2.23", "criterion-plot", "futures", - "itertools 0.10.5", + 
"itertools", "lazy_static", "num-traits", "oorandom", @@ -1023,7 +1008,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools 0.10.5", + "itertools", ] [[package]] @@ -1034,9 +1019,9 @@ checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52" [[package]] name = "crossbeam-channel" -version = "0.5.6" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" dependencies = [ "cfg-if", "crossbeam-utils", @@ -1044,9 +1029,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -1055,9 +1040,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" dependencies = [ "autocfg", "cfg-if", @@ -1068,9 +1053,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ "cfg-if", ] @@ -1118,6 +1103,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "crypto-bigint" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "071c0f5945634bc9ba7a452f492377dd6b1993665ddb58f28704119b32f07a9a" +dependencies = [ + "generic-array 0.14.6", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -1161,9 +1158,9 @@ dependencies = [ [[package]] name = "cxx" -version = "1.0.88" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322296e2f2e5af4270b54df9e85a02ff037e271af20ba3e7fe1575515dc840b8" +checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62" dependencies = [ "cc", "cxxbridge-flags", @@ -1173,34 +1170,34 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.88" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "017a1385b05d631e7875b1f151c9f012d37b53491e2a87f65bff5c262b2111d8" +checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690" dependencies = [ "cc", "codespan-reporting", "once_cell", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "scratch", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "cxxbridge-flags" -version = "1.0.88" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c26bbb078acf09bc1ecda02d4223f03bdd28bd4874edcb0379138efc499ce971" +checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf" [[package]] name = "cxxbridge-macro" -version = "1.0.88" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357f40d1f06a24b60ae1fe122542c1fb05d28d32acb2aed064e84bc2ad1e252e" +checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1215,12 +1212,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.2" +version = "0.14.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa" +checksum = "c0808e1bd8671fb44a113a14e13497557533369847788fa2ae912b6ebfce9fa8" dependencies = [ - "darling_core 0.14.2", - "darling_macro 0.14.2", + "darling_core 0.14.3", + "darling_macro 0.14.3", ] [[package]] @@ -1231,24 +1228,24 @@ checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "strsim 0.9.3", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "darling_core" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" +checksum = "001d80444f28e193f30c2f293455da62dcf9a6b29918a4253152ae2b1de592cb" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "strsim 0.10.0", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1259,18 +1256,18 @@ checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" dependencies = [ "darling_core 0.10.2", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" +checksum = "b36230598a2d5de7ec1c6f51f72d8a99a9208daff41de2084d06e3fd3ea56685" dependencies = [ - "darling_core 0.14.2", + "darling_core 0.14.3", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1283,7 +1280,7 @@ dependencies = [ "hashbrown 0.12.3", "lock_api", "once_cell", - "parking_lot_core 0.9.6", + "parking_lot_core 0.9.7", ] [[package]] @@ -1321,15 +1318,25 @@ dependencies = [ "zeroize", ] +[[package]] +name = "der" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc906908ea6458456e5eaa160a9c08543ec3d1e6f71e2235cedd660cb65f9df0" +dependencies = [ + "const-oid", + "zeroize", +] + [[package]] name = "derive_arbitrary" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8beee4701e2e229e8098bbdecdca12449bc3e322f137d269182fa1291e20bd00" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1340,9 +1347,9 @@ checksum = "a2658621297f2cf68762a6f7dc0bb7e1ff2cfd6583daef8ee0fed6f7ec468ec0" dependencies = [ "darling 0.10.2", "derive_builder_core", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1352,9 +1359,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2791ea3e372c8495c0bc2033991d76b512cd799d07491fbd6890124db9458bef" dependencies = [ "darling 0.10.2", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1364,12 +1371,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case 0.4.0", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "rustc_version", - "syn 1.0.107", + "syn 1.0.109", ] +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.8.1" @@ -1451,14 +1464,14 @@ dependencies = [ [[package]] name = "discv5" -version = "0.1.0" -source = "git+https://github.com/sigp/discv5#97a806ccf7817a420b5f43efa23e6127b475d839" +version = "0.2.1" +source = "git+https://github.com/sigp/discv5#e3a6fe7c6efcdfb52b0782c232ef7a3659d46e80" dependencies = [ "aes 0.7.5", "aes-gcm", "arrayvec", 
"delay_map", - "enr 0.6.2", + "enr 0.7.0", "fnv", "futures", "hashlink", @@ -1493,11 +1506,23 @@ dependencies = [ "winapi", ] +[[package]] +name = "downcast" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + +[[package]] +name = "dunce" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" + [[package]] name = "dyn-clone" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9b0705efd4599c15a38151f4721f7bc388306f61084d3bfd50bd07fbca5cb60" +checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" [[package]] name = "ecdsa" @@ -1505,10 +1530,22 @@ version = "0.14.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" dependencies = [ - "der", - "elliptic-curve", - "rfc6979", - "signature", + "der 0.6.1", + "elliptic-curve 0.12.3", + "rfc6979 0.3.1", + "signature 1.6.4", +] + +[[package]] +name = "ecdsa" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1b0a1222f8072619e8a6b667a854020a03d363738303203c09468b3424a420a" +dependencies = [ + "der 0.7.1", + "elliptic-curve 0.13.2", + "rfc6979 0.4.0", + "signature 2.0.0", ] [[package]] @@ -1517,7 +1554,7 @@ version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" dependencies = [ - "signature", + "signature 1.6.4", ] [[package]] @@ -1541,9 +1578,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0188e3c3ba8df5753894d54461f0e39bc91741dc5b22e1c46999ec2c71f4e4" dependencies = [ "enum-ordinalize", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", 
"quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1558,25 +1595,44 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" dependencies = [ - "base16ct", - "crypto-bigint", - "der", + "base16ct 0.1.1", + "crypto-bigint 0.4.9", + "der 0.6.1", + "digest 0.10.6", + "ff 0.12.1", + "generic-array 0.14.6", + "group 0.12.1", + "pkcs8 0.9.0", + "rand_core 0.6.4", + "sec1 0.3.0", + "subtle", + "zeroize", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea5a92946e8614bb585254898bb7dd1ddad241ace60c52149e3765e34cc039d" +dependencies = [ + "base16ct 0.2.0", + "crypto-bigint 0.5.0", "digest 0.10.6", - "ff", + "ff 0.13.0", "generic-array 0.14.6", - "group", - "pkcs8", + "group 0.13.0", + "pkcs8 0.10.1", "rand_core 0.6.4", - "sec1", + "sec1 0.7.1", "subtle", "zeroize", ] [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ "cfg-if", ] @@ -1589,16 +1645,16 @@ checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" [[package]] name = "enr" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26fa0a0be8915790626d5759eb51fe47435a8eac92c2f212bd2da9aa7f30ea56" +checksum = "492a7e5fc2504d5fdce8e124d3e263b244a68b283cac67a69eda0cd43e0aebad" dependencies = [ "base64 0.13.1", "bs58", "bytes", "ed25519-dalek", "hex", - "k256", + "k256 0.11.6", "log", "rand 0.8.5", "rlp", @@ -1609,19 +1665,18 @@ dependencies = [ [[package]] name = "enr" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "492a7e5fc2504d5fdce8e124d3e263b244a68b283cac67a69eda0cd43e0aebad" +checksum = "eb4d5fbf6f56acecd38f5988eb2e4ae412008a2a30268c748c701ec6322f39d4" dependencies = [ "base64 0.13.1", - "bs58", "bytes", "hex", - "k256", + "k256 0.13.0", "log", "rand 0.8.5", "rlp", - "secp256k1 0.24.3", + "secp256k1", "serde", "sha3", "zeroize", @@ -1634,9 +1689,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "570d109b813e904becc80d8d5da38376818a143348413f7149f1340fe04754d4" dependencies = [ "heck", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1646,9 +1701,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ "heck", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1659,10 +1714,10 @@ checksum = "a62bb1df8b45ecb7ffa78dca1c17a438fb193eb083db0b1b494d2a61bcb5096a" dependencies = [ "num-bigint", "num-traits", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "rustc_version", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1671,9 +1726,9 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e88bcb3a067a6555d577aba299e75eff9942da276e6506fc6274327daa026132" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1769,9 +1824,11 @@ dependencies = [ [[package]] name = "ethers-contract" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ + "ethers-contract-abigen", + "ethers-contract-derive", "ethers-core", "ethers-providers", "futures-util", @@ -1783,31 +1840,73 @@ dependencies = [ 
"thiserror", ] +[[package]] +name = "ethers-contract-abigen" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" +dependencies = [ + "Inflector", + "cfg-if", + "dunce", + "ethers-core", + "ethers-etherscan", + "eyre", + "getrandom 0.2.8", + "hex", + "prettyplease", + "proc-macro2 1.0.52", + "quote 1.0.23", + "regex", + "reqwest", + "serde", + "serde_json", + "syn 1.0.109", + "tokio", + "toml", + "url", + "walkdir", +] + +[[package]] +name = "ethers-contract-derive" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" +dependencies = [ + "ethers-contract-abigen", + "ethers-core", + "hex", + "proc-macro2 1.0.52", + "quote 1.0.23", + "syn 1.0.109", +] + [[package]] name = "ethers-core" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ "arrayvec", "bytes", + "cargo_metadata", "chrono", "convert_case 0.6.0", - "elliptic-curve", + "elliptic-curve 0.13.2", "ethabi", "generic-array 0.14.6", "getrandom 0.2.8", "hex", - "k256", + "k256 0.13.0", "num_enum", + "once_cell", "open-fastrlp", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "rand 0.8.5", "rlp", "rlp-derive", "serde", "serde_json", "strum", - "syn 1.0.107", + "syn 1.0.109", "tempfile", "thiserror", "tiny-keccak", @@ -1816,13 +1915,13 @@ dependencies = [ [[package]] name = "ethers-etherscan" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ "ethers-core", "getrandom 0.2.8", "reqwest", - "semver 1.0.16", + "semver 1.0.17", "serde", "serde-aux", "serde_json", @@ -1832,11 +1931,11 @@ dependencies = [ [[package]] name = 
"ethers-middleware" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ "async-trait", - "auto_impl 0.5.0", + "auto_impl", "ethers-contract", "ethers-core", "ethers-etherscan", @@ -1857,13 +1956,13 @@ dependencies = [ [[package]] name = "ethers-providers" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ "async-trait", - "auto_impl 1.0.1", + "auto_impl", "base64 0.21.0", - "enr 0.7.0", + "enr 0.8.0", "ethers-core", "futures-channel", "futures-core", @@ -1894,19 +1993,20 @@ dependencies = [ [[package]] name = "ethers-signers" -version = "1.0.2" -source = "git+https://github.com/gakonst/ethers-rs#228f9447269e61ca7d7f868edd58c08772e38650" +version = "2.0.0" +source = "git+https://github.com/gakonst/ethers-rs#18a049b4c49965fbb4efbb394891767dc0fa1fa9" dependencies = [ "async-trait", "coins-bip32", "coins-bip39", - "elliptic-curve", + "elliptic-curve 0.13.2", "eth-keystore", "ethers-core", "hex", "rand 0.8.5", "sha2 0.10.6", "thiserror", + "tracing", ] [[package]] @@ -1939,9 +2039,9 @@ checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" [[package]] name = "fastrand" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" dependencies = [ "instant", ] @@ -1965,6 +2065,16 @@ dependencies = [ "subtle", ] +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "findshlibs" version = "0.10.2" @@ -1999,6 +2109,15 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + [[package]] name = "fnv" version = "1.0.7" @@ -2014,6 +2133,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fragile" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" + [[package]] name = "funty" version = "2.0.0" @@ -2068,21 +2193,6 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" -[[package]] -name = "futures-lite" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - [[package]] name = "futures-locks" version = "0.7.1" @@ -2099,9 +2209,9 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -2170,6 +2280,7 @@ checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -2282,16 +2393,27 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ - "ff", + "ff 0.12.1", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff 0.13.0", "rand_core 0.6.4", "subtle", ] [[package]] name = "h2" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d" dependencies = [ "bytes", "fnv", @@ -2302,7 +2424,7 @@ dependencies = [ "indexmap", "slab", "tokio", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", ] @@ -2402,15 +2524,15 @@ dependencies = [ "hash32", "rustc_version", "serde", - "spin 0.9.4", + "spin 0.9.5", "stable_deref_trait", ] [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" @@ -2491,9 +2613,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", "fnv", @@ -2718,9 +2840,9 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -2782,12 +2904,12 @@ 
dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" dependencies = [ "libc", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] @@ -2819,9 +2941,9 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef" +checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857" dependencies = [ "hermit-abi 0.3.1", "io-lifetimes", @@ -2829,15 +2951,6 @@ dependencies = [ "windows-sys 0.45.0", ] -[[package]] -name = "itertools" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.10.5" @@ -2849,15 +2962,15 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "js-sys" -version = "0.3.60" +version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" dependencies = [ "wasm-bindgen", ] @@ -2899,7 +3012,7 @@ dependencies = [ "thiserror", "tokio", "tokio-rustls", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", "webpki-roots", ] @@ -2960,9 +3073,9 @@ checksum = 
"baa6da1e4199c10d7b1d0a6e5e8bd8e55f351163b6f4b3cbb044672a69bd4c1c" dependencies = [ "heck", "proc-macro-crate", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -2982,7 +3095,7 @@ dependencies = [ "soketto", "tokio", "tokio-stream", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tower", "tracing", ] @@ -3045,12 +3158,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ "cfg-if", - "ecdsa", - "elliptic-curve", + "ecdsa 0.14.8", + "elliptic-curve 0.12.3", "sha2 0.10.6", "sha3", ] +[[package]] +name = "k256" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955890845095ccf31ef83ad41a05aabb4d8cc23dc3cac5a9f5c89cf26dd0da75" +dependencies = [ + "cfg-if", + "ecdsa 0.16.1", + "elliptic-curve 0.13.2", + "once_cell", + "sha2 0.10.6", + "signature 2.0.0", +] + [[package]] name = "keccak" version = "0.1.3" @@ -3220,18 +3347,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" dependencies = [ "libc", ] [[package]] name = "memoffset" -version = "0.7.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" dependencies = [ "autocfg", ] @@ -3272,9 +3399,9 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" dependencies = [ - "proc-macro2 1.0.51", + 
"proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3331,14 +3458,41 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.42.0", + "windows-sys 0.45.0", +] + +[[package]] +name = "mockall" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50e4a1c770583dac7ab5e2f6c139153b783a53a1bbee9729613f193e59828326" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive", + "predicates", + "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "832663583d5fa284ca8810bf7015e46c9fff9622d3cf34bd1eea5003fec06dd0" +dependencies = [ + "cfg-if", + "proc-macro2 1.0.52", + "quote 1.0.23", + "syn 1.0.109", ] [[package]] @@ -3357,9 +3511,9 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3379,13 +3533,14 @@ dependencies = [ [[package]] name = "nix" -version = "0.24.3" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" dependencies = [ "bitflags", "cfg-if", "libc", + "static_assertions", ] [[package]] @@ -3399,13 +3554,10 @@ dependencies = [ ] [[package]] -name = "nom8" -version = "0.2.0" +name = 
"normalize-line-endings" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" -dependencies = [ - "memchr", -] +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "nu-ansi-term" @@ -3516,23 +3668,23 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d829733185c1ca374f17e52b762f24f535ec625d2cc1f070e34c8a9068f341b" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2be1598bf1c313dcdd12092e3f1920f463462525a21b7b4e11b4168353d0123e" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3575,7 +3727,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" dependencies = [ "arrayvec", - "auto_impl 1.0.1", + "auto_impl", "bytes", "ethereum-types", "open-fastrlp-derive", @@ -3588,9 +3740,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" dependencies = [ "bytes", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3632,9 +3784,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" 
+checksum = "637935964ff85a605d114591d4d2c13c5d1ba2806dae97cea6bf180238a749ac" dependencies = [ "arrayvec", "bitvec 1.0.1", @@ -3652,9 +3804,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" dependencies = [ "proc-macro-crate", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3671,12 +3823,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "parking" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" - [[package]] name = "parking_lot" version = "0.11.2" @@ -3695,7 +3841,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.6", + "parking_lot_core 0.9.7", ] [[package]] @@ -3714,15 +3860,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] @@ -3777,9 +3923,9 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "pest" -version = "2.5.4" +version = "2.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ab62d2fa33726dbe6321cc97ef96d8cde531e3eeaf858a058de53a8a6d40d8f" +checksum = "8cbd939b234e95d72bc393d51788aec68aeeb5d51e748ca08ff3aad58cb722f7" dependencies = [ "thiserror", "ucd-trie", @@ -3810,9 +3956,9 @@ version = "1.0.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3833,8 +3979,18 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" dependencies = [ - "der", - "spki", + "der 0.6.1", + "spki 0.6.0", +] + +[[package]] +name = "pkcs8" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d2820d87d2b008616e5c27212dd9e0e694fb4c6b522de06094106813328cb49" +dependencies = [ + "der 0.7.1", + "spki 0.7.0", ] [[package]] @@ -3894,9 +4050,9 @@ checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" [[package]] name = "postcard" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c2b180dc0bade59f03fd005cb967d3f1e5f69b13922dad0cd6e047cb8af2363" +checksum = "cfa512cd0d087cc9f99ad30a1bf64795b67871edbead083ffc3a4dfafa59aa00" dependencies = [ "cobs", "heapless", @@ -3905,9 +4061,9 @@ dependencies = [ [[package]] name = "pprof" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e20150f965e0e4c925982b9356da71c84bcd56cb66ef4e894825837cbcf6613e" +checksum = "196ded5d4be535690899a4631cc9f18cdc41b7ebf24a79400f46f48e49a11059" dependencies = [ "backtrace", "cfg-if", @@ -3931,6 +4087,46 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "predicates" +version = "2.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" +dependencies = [ + "difflib", + 
"float-cmp", + "itertools", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72f883590242d3c6fc5bf50299011695fa6590c2c70eac95ee1bdb9a733ad1a2" + +[[package]] +name = "predicates-tree" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ff541861505aabf6ea722d2131ee980b8276e10a1297b94e896dd8b621850d" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "prettyplease" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e97e3215779627f01ee256d2fad52f3d95e8e1c11e9fc6fd08f7cd455d5d5c78" +dependencies = [ + "proc-macro2 1.0.52", + "syn 1.0.109", +] + [[package]] name = "primitive-types" version = "0.12.1" @@ -3947,9 +4143,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ "once_cell", "toml_edit", @@ -3962,9 +4158,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", "version_check", ] @@ -3974,7 +4170,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "version_check", ] @@ -3990,9 +4186,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.51" +version = "1.0.52" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224" dependencies = [ "unicode-ident", ] @@ -4102,7 +4298,7 @@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", ] [[package]] @@ -4209,18 +4405,18 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "10.6.0" +version = "10.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6823ea29436221176fe662da99998ad3b4db2c7f31e7b6f5fe43adccd6320bb" +checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" dependencies = [ "bitflags", ] [[package]] name = "rayon" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" dependencies = [ "either", "rayon-core", @@ -4228,9 +4424,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.10.2" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -4284,15 +4480,6 @@ version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies 
= [ - "winapi", -] - [[package]] name = "reqwest" version = "0.11.14" @@ -4347,7 +4534,7 @@ name = "reth" version = "0.1.0" dependencies = [ "backon", - "clap 4.1.4", + "clap 4.1.8", "comfy-table", "confy", "crossterm", @@ -4357,15 +4544,13 @@ dependencies = [ "futures", "human_bytes", "jsonrpsee", - "metrics", "metrics-exporter-prometheus", "metrics-util", "proptest", - "reth-consensus", + "reth-beacon-consensus", "reth-db", "reth-discv4", "reth-downloaders", - "reth-eth-wire", "reth-executor", "reth-interfaces", "reth-net-nat", @@ -4386,12 +4571,18 @@ dependencies = [ "serde_json", "shellexpand", "tempfile", - "thiserror", "tokio", - "tokio-stream", "tracing", "tui", - "walkdir", +] + +[[package]] +name = "reth-beacon-consensus" +version = "0.1.0" +dependencies = [ + "reth-consensus-common", + "reth-interfaces", + "reth-primitives", ] [[package]] @@ -4410,14 +4601,14 @@ dependencies = [ ] [[package]] -name = "reth-consensus" +name = "reth-consensus-common" version = "0.1.0" dependencies = [ "assert_matches", + "mockall", "reth-interfaces", "reth-primitives", "reth-provider", - "tokio", ] [[package]] @@ -4446,7 +4637,7 @@ dependencies = [ "reth-interfaces", "reth-libmdbx", "reth-primitives", - "secp256k1 0.24.3", + "secp256k1", "serde", "serde_json", "tempfile", @@ -4461,7 +4652,7 @@ name = "reth-discv4" version = "0.1.0" dependencies = [ "discv5", - "enr 0.7.0", + "enr 0.8.0", "generic-array 0.14.6", "hex", "rand 0.8.5", @@ -4471,7 +4662,7 @@ dependencies = [ "reth-rlp", "reth-rlp-derive", "reth-tracing", - "secp256k1 0.24.3", + "secp256k1", "serde", "thiserror", "tokio", @@ -4485,15 +4676,15 @@ version = "0.1.0" dependencies = [ "async-trait", "data-encoding", - "enr 0.7.0", + "enr 0.8.0", "linked_hash_set", - "lru 0.9.0", "parking_lot 0.12.1", "reth-net-common", "reth-primitives", "reth-rlp", "reth-tracing", - "secp256k1 0.24.3", + "schnellru", + "secp256k1", "serde", "serde_with", "thiserror", @@ -4510,7 +4701,7 @@ dependencies = [ "assert_matches", "futures", 
"futures-util", - "itertools 0.10.5", + "itertools", "metrics", "pin-project", "rayon", @@ -4526,7 +4717,7 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", ] @@ -4550,13 +4741,13 @@ dependencies = [ "reth-net-common", "reth-primitives", "reth-rlp", - "secp256k1 0.24.3", + "secp256k1", "sha2 0.10.6", "sha3", "thiserror", "tokio", "tokio-stream", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", "typenum", ] @@ -4570,6 +4761,7 @@ dependencies = [ "bytes", "ethers-core", "futures", + "hex", "hex-literal", "metrics", "pin-project", @@ -4581,7 +4773,7 @@ dependencies = [ "reth-primitives", "reth-rlp", "reth-tracing", - "secp256k1 0.24.3", + "secp256k1", "serde", "smol_str", "snap", @@ -4589,7 +4781,7 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", ] @@ -4597,16 +4789,18 @@ dependencies = [ name = "reth-executor" version = "0.1.0" dependencies = [ + "aquamarine", "async-trait", - "auto_impl 1.0.1", + "auto_impl", "hash-db", - "hashbrown 0.13.2", + "parking_lot 0.12.1", "plain_hasher", "reth-db", "reth-interfaces", "reth-primitives", "reth-provider", "reth-revm", + "reth-revm-inspectors", "reth-rlp", "revm", "rlp", @@ -4623,7 +4817,7 @@ version = "0.1.0" dependencies = [ "arbitrary", "async-trait", - "auto_impl 1.0.1", + "auto_impl", "futures", "hex-literal", "modular-bitfield", @@ -4636,7 +4830,7 @@ dependencies = [ "reth-primitives", "reth-rpc-types", "revm-primitives", - "secp256k1 0.24.3", + "secp256k1", "thiserror", "tokio", "tokio-stream", @@ -4655,7 +4849,7 @@ dependencies = [ "serde_json", "thiserror", "tokio", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tower", "tracing", "tracing-test", @@ -4705,11 +4899,11 @@ version = "0.1.0" dependencies = [ "metrics", "once_cell", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "regex", "serial_test", - "syn 1.0.107", + "syn 1.0.109", "trybuild", ] @@ -4742,8 +4936,8 @@ 
version = "0.1.0" dependencies = [ "aquamarine", "async-trait", - "auto_impl 1.0.1", - "enr 0.7.0", + "auto_impl", + "enr 0.8.0", "ethers-core", "ethers-middleware", "ethers-providers", @@ -4775,7 +4969,7 @@ dependencies = [ "reth-tasks", "reth-tracing", "reth-transaction-pool", - "secp256k1 0.24.3", + "secp256k1", "serde", "serde_json", "serial_test", @@ -4783,7 +4977,7 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tracing", ] @@ -4817,7 +5011,6 @@ dependencies = [ "impl-serde", "modular-bitfield", "once_cell", - "parity-scale-codec", "plain_hasher", "pprof", "proptest", @@ -4827,7 +5020,7 @@ dependencies = [ "reth-rlp", "reth-rlp-derive", "revm-primitives", - "secp256k1 0.24.3", + "secp256k1", "serde", "serde_json", "serde_with", @@ -4844,14 +5037,24 @@ dependencies = [ name = "reth-provider" version = "0.1.0" dependencies = [ - "auto_impl 1.0.1", + "assert_matches", + "auto_impl", + "cita_trie", + "hasher", + "itertools", "parking_lot 0.12.1", + "proptest", + "reth-codecs", "reth-db", "reth-interfaces", "reth-primitives", "reth-revm-primitives", + "reth-rlp", + "reth-tracing", "revm-primitives", "thiserror", + "tracing", + "triehash", ] [[package]] @@ -4861,6 +5064,7 @@ dependencies = [ "reth-interfaces", "reth-primitives", "reth-provider", + "reth-revm-inspectors", "reth-revm-primitives", "revm", ] @@ -4871,7 +5075,9 @@ version = "0.1.0" dependencies = [ "hashbrown 0.13.2", "reth-primitives", + "reth-rpc-types", "revm", + "serde", ] [[package]] @@ -4887,20 +5093,16 @@ name = "reth-rlp" version = "0.1.2" dependencies = [ "arrayvec", - "auto_impl 1.0.1", + "auto_impl", "bytes", "criterion", - "enr 0.7.0", "ethereum-types", "ethnum", "hex-literal", "pprof", - "rand 0.8.5", "reth-rlp", "reth-rlp-derive", "revm-primitives", - "rlp", - "secp256k1 0.24.3", "smol_str", ] @@ -4908,9 +5110,9 @@ dependencies = [ name = "reth-rlp-derive" version = "0.1.1" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 
1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -4918,6 +5120,9 @@ name = "reth-rpc" version = "0.1.0" dependencies = [ "async-trait", + "bytes", + "ethers-core", + "futures", "hex", "http", "http-body", @@ -4938,7 +5143,8 @@ dependencies = [ "reth-tasks", "reth-transaction-pool", "revm", - "secp256k1 0.26.0", + "schnellru", + "secp256k1", "serde", "serde_json", "thiserror", @@ -4972,6 +5178,7 @@ dependencies = [ "reth-rpc-api", "reth-rpc-engine-api", "reth-rpc-types", + "reth-tasks", "reth-tracing", "reth-transaction-pool", "serde", @@ -5007,6 +5214,7 @@ version = "0.1.0" dependencies = [ "jsonrpsee-types", "lru 0.9.0", + "rand 0.8.5", "reth-interfaces", "reth-network-api", "reth-primitives", @@ -5023,7 +5231,7 @@ version = "0.1.0" dependencies = [ "async-trait", "confy", - "enr 0.7.0", + "enr 0.8.0", "ethers-core", "ethers-middleware", "ethers-providers", @@ -5035,25 +5243,19 @@ dependencies = [ "reth-db", "reth-discv4", "reth-downloaders", - "reth-interfaces", "reth-net-nat", "reth-network", "reth-network-api", "reth-primitives", "reth-provider", - "reth-staged-sync", - "reth-stages", - "reth-tasks", "reth-tracing", - "secp256k1 0.24.3", + "secp256k1", "serde", "serde_json", "shellexpand", "tempfile", "thiserror", "tokio", - "tokio-stream", - "tokio-test", "tracing", "walkdir", ] @@ -5063,23 +5265,19 @@ name = "reth-stages" version = "0.1.0" dependencies = [ "aquamarine", - "arbitrary", "assert_matches", "async-trait", - "cita_trie", "criterion", - "eyre", "futures-util", - "hasher", - "itertools 0.10.5", + "itertools", "metrics", "num-traits", "paste", "pin-project", "pprof", - "proptest", "rand 0.8.5", "rayon", + "reth-codecs", "reth-db", "reth-downloaders", "reth-eth-wire", @@ -5088,16 +5286,11 @@ dependencies = [ "reth-metrics-derive", "reth-primitives", "reth-provider", - "reth-revm", "reth-rlp", - "reth-staged-sync", - "serde", - "tempfile", "thiserror", "tokio", "tokio-stream", "tracing", - "triehash", ] [[package]] @@ -5128,7 
+5321,7 @@ version = "0.1.0" dependencies = [ "aquamarine", "async-trait", - "auto_impl 1.0.1", + "auto_impl", "bitflags", "fnv", "futures-util", @@ -5138,6 +5331,7 @@ dependencies = [ "rand 0.8.5", "reth-metrics-derive", "reth-primitives", + "reth-provider", "reth-rlp", "ruint", "serde", @@ -5149,9 +5343,9 @@ dependencies = [ [[package]] name = "revm" version = "3.0.0" -source = "git+https://github.com/bluealloy/revm#4d2f0741c5f9daec0ceb7cc7733d65ea4c496170" +source = "git+https://github.com/bluealloy/revm#3d8ca6641d2e72448c23f4596f769c8fd1c784d1" dependencies = [ - "auto_impl 1.0.1", + "auto_impl", "revm-interpreter", "revm-precompile", ] @@ -5159,7 +5353,7 @@ dependencies = [ [[package]] name = "revm-interpreter" version = "1.0.0" -source = "git+https://github.com/bluealloy/revm#4d2f0741c5f9daec0ceb7cc7733d65ea4c496170" +source = "git+https://github.com/bluealloy/revm#3d8ca6641d2e72448c23f4596f769c8fd1c784d1" dependencies = [ "derive_more", "enumn", @@ -5170,14 +5364,14 @@ dependencies = [ [[package]] name = "revm-precompile" version = "2.0.0" -source = "git+https://github.com/bluealloy/revm#4d2f0741c5f9daec0ceb7cc7733d65ea4c496170" +source = "git+https://github.com/bluealloy/revm#3d8ca6641d2e72448c23f4596f769c8fd1c784d1" dependencies = [ - "k256", + "k256 0.11.6", "num", "once_cell", "revm-primitives", "ripemd", - "secp256k1 0.26.0", + "secp256k1", "sha2 0.10.6", "sha3", "substrate-bn", @@ -5186,10 +5380,11 @@ dependencies = [ [[package]] name = "revm-primitives" version = "1.0.0" -source = "git+https://github.com/bluealloy/revm#4d2f0741c5f9daec0ceb7cc7733d65ea4c496170" +source = "git+https://github.com/bluealloy/revm#3d8ca6641d2e72448c23f4596f769c8fd1c784d1" dependencies = [ "arbitrary", - "auto_impl 1.0.1", + "auto_impl", + "bitvec 1.0.1", "bytes", "derive_more", "enumn", @@ -5212,16 +5407,26 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" 
dependencies = [ - "crypto-bigint", + "crypto-bigint 0.4.9", "hmac", "zeroize", ] +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + [[package]] name = "rgb" -version = "0.8.35" +version = "0.8.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7495acf66551cdb696b7711408144bcd3194fc78e32f3a09e809bfe7dd4a7ce3" +checksum = "20ec2d3e3fc7a92ced357df9cebd5a10b6fb2aa1ee797bf7e9ce2f17dffc8f59" dependencies = [ "bytemuck", ] @@ -5266,9 +5471,9 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -5316,21 +5521,21 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.16", + "semver 1.0.17", ] [[package]] name = "rustix" -version = "0.36.7" +version = "0.36.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fdebc4b395b7fbb9ab11e462e20ed9051e7b16e42d24042c776eca0ac81b03" +checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc" dependencies = [ "bitflags", "errno", "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] @@ -5386,9 +5591,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] name = "salsa20" @@ -5427,9 
+5632,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c" dependencies = [ "proc-macro-crate", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -5441,6 +5646,17 @@ dependencies = [ "windows-sys 0.42.0", ] +[[package]] +name = "schnellru" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "772575a524feeb803e5b0fcbc6dd9f367e579488197c94c6e4023aad2305774d" +dependencies = [ + "ahash 0.8.3", + "cfg-if", + "hashbrown 0.13.2", +] + [[package]] name = "scopeguard" version = "1.1.0" @@ -5449,9 +5665,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "scratch" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" +checksum = "5d5e082f6ea090deaf0e6dd04b68360fd5cddb152af6ce8927c9d25db299f98c" [[package]] name = "scrypt" @@ -5481,23 +5697,26 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" dependencies = [ - "base16ct", - "der", + "base16ct 0.1.1", + "der 0.6.1", "generic-array 0.14.6", - "pkcs8", + "pkcs8 0.9.0", "subtle", "zeroize", ] [[package]] -name = "secp256k1" -version = "0.24.3" +name = "sec1" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b1629c9c557ef9b293568b338dddfc8208c98a18c59d722a9d53f859d9c9b62" +checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" dependencies = [ - "rand 0.8.5", - "secp256k1-sys 0.6.1", - "serde", + "base16ct 0.2.0", + "der 0.7.1", + "generic-array 0.14.6", + "pkcs8 0.10.1", + "subtle", + "zeroize", ] [[package]] @@ -5507,16 +5726,8 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4124a35fe33ae14259c490fd70fa199a32b9ce9502f2ee6bc4f81ec06fa65894" dependencies = [ "rand 0.8.5", - "secp256k1-sys 0.8.0", -] - -[[package]] -name = "secp256k1-sys" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83080e2c2fc1006e625be82e5d1eb6a43b7fd9578b617fcc55814daf286bba4b" -dependencies = [ - "cc", + "secp256k1-sys", + "serde", ] [[package]] @@ -5562,9 +5773,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" dependencies = [ "serde", ] @@ -5615,16 +5826,16 @@ version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" dependencies = [ "itoa", "ryu", @@ -5665,10 +5876,10 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1966009f3c05f095697c537312f5415d1e3ed31ce0a56942bac4c771c5c335e" dependencies = [ - "darling 0.14.2", - "proc-macro2 1.0.51", + "darling 0.14.3", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -5691,9 +5902,9 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b64f9e531ce97c88b4778aad0ceee079216071cffec6ac9b904277f8f92e7fe3" 
dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -5803,9 +6014,9 @@ checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" [[package]] name = "signal-hook" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" dependencies = [ "libc", "signal-hook-registry", @@ -5824,9 +6035,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] @@ -5841,6 +6052,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "signature" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + [[package]] name = "simple_asn1" version = "0.6.2" @@ -5861,9 +6082,9 @@ checksum = "ceb945e54128e09c43d8e4f1277851bd5044c6fc540bbaa2ad888f60b3da9ae7" [[package]] name = "slab" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" dependencies = [ "autocfg", ] @@ -5876,9 +6097,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "smol_str" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44" +checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" dependencies = [ "serde", ] @@ -5891,9 +6112,9 @@ checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831" [[package]] name = "socket2" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", "winapi", @@ -5923,9 +6144,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" +checksum = "7dccf47db1b41fa1573ed27ccf5e08e3ca771cb994f776668c5ebda893b248fc" dependencies = [ "lock_api", ] @@ -5937,7 +6158,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" dependencies = [ "base64ct", - "der", + "der 0.6.1", +] + +[[package]] +name = "spki" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0445c905640145c7ea8c1993555957f65e7c46d0535b91ba501bc9bfc85522f" +dependencies = [ + "base64ct", + "der 0.7.1", ] [[package]] @@ -5986,10 +6217,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "rustversion", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -6066,11 +6297,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "unicode-ident", ] @@ -6081,9 +6312,9 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", "unicode-xid 0.2.4", ] @@ -6095,16 +6326,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if", "fastrand", - "libc", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys 0.42.0", ] [[package]] @@ -6116,6 +6346,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "termtree" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95059e91184749cb66be6dc994f67f182b6d897cb3df74a5bf66b5e709295fd8" + [[package]] name = "test-fuzz" version = "3.0.5" @@ -6135,7 +6371,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9186daca5c58cb307d09731e0ba06b13fd6c036c90672b9bfc31cecf76cf689" dependencies = [ "cargo_metadata", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", "serde", "strum_macros", @@ -6147,13 +6383,13 @@ version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57d187b450bfb5b7939f82f9747dc1ebb15a7a9c4a93cd304a41aece7149608b" dependencies = [ - "darling 0.14.2", + "darling 0.14.3", "if_chain", "lazy_static", - "proc-macro2 1.0.51", + 
"proc-macro2 1.0.52", "quote 1.0.23", "subprocess", - "syn 1.0.107", + "syn 1.0.109", "test-fuzz-internal", "toolchain_find", "unzip-n", @@ -6181,38 +6417,39 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.3.17" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ "itoa", "serde", @@ -6228,9 +6465,9 @@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = 
"fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36" dependencies = [ "time-core", ] @@ -6265,15 +6502,15 @@ dependencies = [ [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.25.0" +version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" dependencies = [ "autocfg", "bytes", @@ -6286,7 +6523,7 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] @@ -6295,9 +6532,9 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -6313,27 +6550,14 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", "tokio", - "tokio-util 0.7.4", -] - -[[package]] -name = "tokio-test" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53474327ae5e166530d17f2d956afcb4f8a004de581b3cae10f12006bc8163e3" -dependencies = [ - "async-stream", - "bytes", - "futures-core", - "tokio", - "tokio-stream", + "tokio-util 0.7.7", ] [[package]] @@ -6365,9 
+6589,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.4" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" +checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" dependencies = [ "bytes", "futures-core", @@ -6389,19 +6613,19 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.5.1" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" [[package]] name = "toml_edit" -version = "0.18.1" +version = "0.19.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +checksum = "9a1eb0622d28f4b9c90adc4ea4b2b46b47663fde9ac5fafcb14a1369d5508825" dependencies = [ "indexmap", - "nom8", "toml_datetime", + "winnow", ] [[package]] @@ -6432,7 +6656,7 @@ dependencies = [ "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tower-layer", "tower-service", "tracing", @@ -6460,7 +6684,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "tokio", - "tokio-util 0.7.4", + "tokio-util 0.7.7", "tower", "tower-layer", "tower-service", @@ -6510,9 +6734,9 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -6579,9 +6803,9 @@ dependencies = [ [[package]] name = "tracing-test" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3d272c44878d2bbc9f4a20ad463724f03e19dbc667c6e84ac433ab7ffcc70b" +checksum = 
"3a2c0ff408fe918a94c428a3f2ad04e4afd5c95bbc08fcf868eff750c15728a4" dependencies = [ "lazy_static", "tracing-core", @@ -6591,13 +6815,13 @@ dependencies = [ [[package]] name = "tracing-test-macro" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744324b12d69a9fc1edea4b38b7b1311295b662d161ad5deac17bb1358224a08" +checksum = "258bc1c4f8e2e73a977812ab339d503e6feeb92700f6d07a6de4d321522d5c08" dependencies = [ "lazy_static", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -6708,9 +6932,9 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "trybuild" -version = "1.0.77" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44da5a6f2164c8e14d3bbc0657d69c5966af9f5f6930d4f600b1f5c4a673413" +checksum = "223fc354447478d08231355617eb8c37affad0e83d33aeac30a8c275786b905a" dependencies = [ "basic-toml", "glob", @@ -6800,9 +7024,9 @@ checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" [[package]] name = "unicode-ident" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" +checksum = "775c11906edafc97bc378816b94585fbd9a054eabaf86fdd0ced94af449efab7" [[package]] name = "unicode-normalization" @@ -6815,9 +7039,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -6859,9 +7083,9 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2e7e85a0596447f0f2ac090e16bc4c516c6fe91771fb0c0ccf7fa3dae896b9c" dependencies 
= [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -6921,12 +7145,6 @@ dependencies = [ "libc", ] -[[package]] -name = "waker-fn" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" - [[package]] name = "walkdir" version = "2.3.2" @@ -6968,9 +7186,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6978,24 +7196,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.33" +version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" dependencies = [ "cfg-if", "js-sys", @@ -7005,9 +7223,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" +checksum = 
"4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" dependencies = [ "quote 1.0.23", "wasm-bindgen-macro-support", @@ -7015,22 +7233,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" [[package]] name = "wasm-timer" @@ -7049,9 +7267,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.60" +version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" dependencies = [ "js-sys", "wasm-bindgen", @@ -7200,6 +7418,15 @@ version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +[[package]] +name = "winnow" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c95fb4ff192527911dd18eb138ac30908e7165b8944e528b6af93aa4c842d345" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.10.1" @@ -7267,8 +7494,8 @@ version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c" 
dependencies = [ - "proc-macro2 1.0.51", + "proc-macro2 1.0.52", "quote 1.0.23", - "syn 1.0.107", + "syn 1.0.109", "synstructure", ] diff --git a/Cargo.toml b/Cargo.toml index c94e7294bff..0da5e418d6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,8 @@ [workspace] members = [ "bin/reth", - "crates/consensus", + "crates/consensus/beacon", + "crates/consensus/common", "crates/executor", "crates/interfaces", "crates/metrics/metrics-derive", @@ -41,6 +42,11 @@ members = [ exclude = ["crate-template"] default-members = ["bin/reth"] +# Like release, but with full debug symbols. Useful for e.g. `perf`. +[profile.debug-fast] +inherits = "release" +debug = true + [patch.crates-io] revm = { git = "https://github.com/bluealloy/revm" } revm-primitives = { git = "https://github.com/bluealloy/revm" } diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index fea91f9e8b1..ff1aa0aed3a 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -9,23 +9,22 @@ readme = "README.md" [dependencies] # reth reth-primitives = { path = "../../crates/primitives", features = ["arbitrary"] } -reth-db = {path = "../../crates/storage/db", features = ["mdbx", "test-utils"] } +reth-db = { path = "../../crates/storage/db", features = ["mdbx", "test-utils"] } # TODO: Temporary use of the test-utils feature reth-provider = { path = "../../crates/storage/provider", features = ["test-utils"] } reth-staged-sync = { path = "../../crates/staged-sync" } reth-stages = { path = "../../crates/stages"} reth-interfaces = { path = "../../crates/interfaces", features = ["test-utils"] } reth-transaction-pool = { path = "../../crates/transaction-pool", features = ["test-utils"] } -reth-consensus = { path = "../../crates/consensus" } +reth-beacon-consensus = { path = "../../crates/consensus/beacon" } reth-executor = { path = "../../crates/executor" } -reth-eth-wire = { path = "../../crates/net/eth-wire" } reth-rpc-engine-api = { path = "../../crates/rpc/rpc-engine-api" } reth-rpc-builder = { path = 
"../../crates/rpc/rpc-builder" } reth-rpc = { path = "../../crates/rpc/rpc" } reth-rlp = { path = "../../crates/rlp" } -reth-network = {path = "../../crates/net/network", features = ["serde"] } -reth-network-api = {path = "../../crates/net/network-api" } -reth-downloaders = {path = "../../crates/net/downloaders", features = ["test-utils"] } +reth-network = { path = "../../crates/net/network", features = ["serde"] } +reth-network-api = { path = "../../crates/net/network-api" } +reth-downloaders = { path = "../../crates/net/downloaders", features = ["test-utils"] } reth-tracing = { path = "../../crates/tracing" } reth-tasks = { path = "../../crates/tasks" } reth-net-nat = { path = "../../crates/net/nat" } @@ -36,7 +35,6 @@ tracing = "0.1" # io fdlimit = "0.2.1" -walkdir = "2.3" serde = "1.0" serde_json = "1.0" shellexpand = "3.0.0" @@ -44,7 +42,6 @@ dirs-next = "2.0.0" confy = "0.5" # rpc/metrics -metrics = "0.20.1" metrics-exporter-prometheus = { version = "0.11.0", features = ["http-listener"] } metrics-util = "0.14.0" @@ -53,10 +50,8 @@ proptest = "1.0" # misc eyre = "0.6.8" -clap = { version = "4.0", features = ["derive", "cargo"] } -thiserror = "1.0" +clap = { version = "4", features = ["derive", "cargo"] } tokio = { version = "1.21", features = ["sync", "macros", "rt-multi-thread"] } -tokio-stream = "0.1" futures = "0.3.25" tempfile = { version = "3.3.0" } backon = "0.4" @@ -64,4 +59,4 @@ comfy-table = "6.1.4" crossterm = "0.25.0" tui = "0.19.0" jsonrpsee = { version = "0.16", features = ["server"] } -human_bytes = "0.4.1" \ No newline at end of file +human_bytes = "0.4.1" diff --git a/bin/reth/src/args/network_args.rs b/bin/reth/src/args/network_args.rs index 6bc3212df0d..929101f5c33 100644 --- a/bin/reth/src/args/network_args.rs +++ b/bin/reth/src/args/network_args.rs @@ -2,12 +2,11 @@ use crate::dirs::{KnownPeersPath, PlatformPath}; use clap::Args; -use reth_discv4::bootnodes::mainnet_nodes; use reth_net_nat::NatResolver; use 
reth_network::NetworkConfigBuilder; -use reth_primitives::{ChainSpec, NodeRecord}; +use reth_primitives::{mainnet_nodes, ChainSpec, NodeRecord}; use reth_staged_sync::Config; -use std::path::PathBuf; +use std::{path::PathBuf, sync::Arc}; /// Parameters for configuring the network more granularity via CLI #[derive(Debug, Args)] @@ -50,11 +49,16 @@ pub struct NetworkArgs { impl NetworkArgs { /// Build a [`NetworkConfigBuilder`] from a [`Config`] and a [`ChainSpec`], in addition to the /// values in this option struct. - pub fn network_config(&self, config: &Config, chain_spec: ChainSpec) -> NetworkConfigBuilder { - let peers_file = (!self.no_persist_peers).then_some(&self.peers_file); + pub fn network_config( + &self, + config: &Config, + chain_spec: Arc, + ) -> NetworkConfigBuilder { + let chain_bootnodes = chain_spec.chain.bootnodes().unwrap_or_else(mainnet_nodes); + let network_config_builder = config - .network_config(self.nat, peers_file.map(|f| f.as_ref().to_path_buf())) - .boot_nodes(self.bootnodes.clone().unwrap_or_else(mainnet_nodes)) + .network_config(self.nat, self.persistent_peers_file()) + .boot_nodes(self.bootnodes.clone().unwrap_or(chain_bootnodes)) .chain_spec(chain_spec); self.discovery.apply_to_builder(network_config_builder) @@ -87,6 +91,10 @@ pub struct DiscoveryArgs { /// Disable Discv4 discovery. #[arg(long, conflicts_with = "disable_discovery")] disable_discv4_discovery: bool, + + /// The UDP port to use for P2P discovery/networking. 
+ #[arg(long = "discovery.port")] + pub port: Option, } impl DiscoveryArgs { diff --git a/bin/reth/src/args/rpc_server_args.rs b/bin/reth/src/args/rpc_server_args.rs index 0afb2a016ea..5c593b84e6f 100644 --- a/bin/reth/src/args/rpc_server_args.rs +++ b/bin/reth/src/args/rpc_server_args.rs @@ -11,6 +11,7 @@ use reth_rpc_builder::{ RpcServerHandle, ServerBuilder, TransportRpcModuleConfig, }; use reth_rpc_engine_api::EngineApiHandle; +use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; use std::{ net::{IpAddr, Ipv4Addr, SocketAddr}, @@ -103,11 +104,12 @@ impl RpcServerArgs { } /// Convenience function for starting a rpc server with configs which extracted from cli args. - pub(crate) async fn start_rpc_server( + pub(crate) async fn start_rpc_server( &self, client: Client, pool: Pool, network: Network, + executor: Tasks, ) -> Result where Client: BlockProvider @@ -115,9 +117,11 @@ impl RpcServerArgs { + StateProviderFactory + EvmEnvProvider + Clone + + Unpin + 'static, Pool: TransactionPool + Clone + 'static, Network: NetworkInfo + Peers + Clone + 'static, + Tasks: TaskSpawner + Clone + 'static, { reth_rpc_builder::launch( client, @@ -125,16 +129,18 @@ impl RpcServerArgs { network, self.transport_rpc_module_config(), self.rpc_server_config(), + executor, ) .await } /// Create Engine API server. 
- pub(crate) async fn start_auth_server( + pub(crate) async fn start_auth_server( &self, client: Client, pool: Pool, network: Network, + executor: Tasks, handle: EngineApiHandle, ) -> Result where @@ -143,16 +149,27 @@ impl RpcServerArgs { + StateProviderFactory + EvmEnvProvider + Clone + + Unpin + 'static, Pool: TransactionPool + Clone + 'static, Network: NetworkInfo + Peers + Clone + 'static, + Tasks: TaskSpawner + Clone + 'static, { let socket_address = SocketAddr::new( self.auth_addr.unwrap_or(IpAddr::V4(Ipv4Addr::UNSPECIFIED)), self.auth_port.unwrap_or(constants::DEFAULT_AUTH_PORT), ); let secret = self.jwt_secret().map_err(|err| RpcError::Custom(err.to_string()))?; - reth_rpc_builder::auth::launch(client, pool, network, handle, socket_address, secret).await + reth_rpc_builder::auth::launch( + client, + pool, + network, + executor, + handle, + socket_address, + secret, + ) + .await } /// Creates the [TransportRpcModuleConfig] from cli args. @@ -189,7 +206,7 @@ impl RpcServerArgs { if self.ws { let socket_address = SocketAddr::new( self.ws_addr.unwrap_or(IpAddr::V4(Ipv4Addr::UNSPECIFIED)), - self.ws_port.unwrap_or(constants::DEFAULT_HTTP_RPC_PORT), + self.ws_port.unwrap_or(constants::DEFAULT_WS_RPC_PORT), ); config = config.with_ws_address(socket_address).with_http(ServerBuilder::new()); } diff --git a/bin/reth/src/chain/import.rs b/bin/reth/src/chain/import.rs index f93228a3f6a..4cd314e2276 100644 --- a/bin/reth/src/chain/import.rs +++ b/bin/reth/src/chain/import.rs @@ -5,18 +5,16 @@ use crate::{ use clap::{crate_version, Parser}; use eyre::Context; use futures::{Stream, StreamExt}; -use reth_consensus::beacon::BeaconConsensus; +use reth_beacon_consensus::BeaconConsensus; use reth_db::mdbx::{Env, WriteMap}; use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, test_utils::FileClient, }; use reth_interfaces::{ - consensus::{Consensus, ForkchoiceState}, - 
p2p::headers::client::NoopStatusUpdater, - sync::SyncStateUpdater, + consensus::Consensus, p2p::headers::client::NoopStatusUpdater, sync::SyncStateUpdater, }; -use reth_primitives::ChainSpec; +use reth_primitives::{ChainSpec, H256}; use reth_staged_sync::{ utils::{ chainspec::genesis_value_parser, @@ -26,9 +24,10 @@ use reth_staged_sync::{ }; use reth_stages::{ prelude::*, - stages::{ExecutionStage, SenderRecoveryStage, TotalDifficultyStage}, + stages::{ExecutionStage, HeaderSyncMode, SenderRecoveryStage, TotalDifficultyStage}, }; use std::sync::Arc; +use tokio::sync::watch; use tracing::{debug, info}; /// Syncs RLP encoded blocks from a file. @@ -63,7 +62,7 @@ pub struct ImportCommand { default_value = "mainnet", value_parser = genesis_value_parser )] - chain: ChainSpec, + chain: Arc, /// The path to a block file for import. /// @@ -89,6 +88,9 @@ impl ImportCommand { init_genesis(db.clone(), self.chain.clone())?; + let consensus = Arc::new(BeaconConsensus::new(self.chain.clone())); + info!(target: "reth::cli", "Consensus engine initialized"); + // create a new FileClient info!(target: "reth::cli", "Importing chain file"); let file_client = Arc::new(FileClient::new(&self.path).await?); @@ -97,18 +99,13 @@ impl ImportCommand { let tip = file_client.tip().expect("file client has no tip"); info!(target: "reth::cli", "Chain file imported"); - let (consensus, notifier) = BeaconConsensus::builder().build(self.chain.clone()); - debug!(target: "reth::cli", %tip, "Tip manually set"); - notifier.send(ForkchoiceState { - head_block_hash: tip, - safe_block_hash: tip, - finalized_block_hash: tip, - })?; - info!(target: "reth::cli", "Consensus engine initialized"); - let (mut pipeline, events) = self.build_import_pipeline(config, db.clone(), &consensus, file_client).await?; + // override the tip + pipeline.set_tip(tip); + debug!(target: "reth::cli", %tip, "Tip manually set"); + tokio::spawn(handle_events(None, events)); // Run pipeline @@ -140,26 +137,29 @@ impl ImportCommand { 
.build(file_client.clone(), consensus.clone(), db) .into_task(); + let (tip_tx, tip_rx) = watch::channel(H256::zero()); + let factory = reth_executor::Factory::new(self.chain.clone()); + let mut pipeline = Pipeline::builder() + .with_tip_sender(tip_tx) .with_sync_state_updater(file_client) .add_stages( DefaultStages::new( + HeaderSyncMode::Tip(tip_rx), consensus.clone(), header_downloader, body_downloader, NoopStatusUpdater::default(), + factory.clone(), + ) + .set( + TotalDifficultyStage::new(consensus.clone()) + .with_commit_threshold(config.stages.total_difficulty.commit_threshold), ) - .set(TotalDifficultyStage { - chain_spec: self.chain.clone(), - commit_threshold: config.stages.total_difficulty.commit_threshold, - }) .set(SenderRecoveryStage { commit_threshold: config.stages.sender_recovery.commit_threshold, }) - .set(ExecutionStage { - chain_spec: self.chain.clone(), - commit_threshold: config.stages.execution.commit_threshold, - }), + .set(ExecutionStage::new(factory, config.stages.execution.commit_threshold)), ) .with_max_block(0) .build(); diff --git a/bin/reth/src/chain/init.rs b/bin/reth/src/chain/init.rs index e8eb32afebe..0d3f395634f 100644 --- a/bin/reth/src/chain/init.rs +++ b/bin/reth/src/chain/init.rs @@ -36,30 +36,22 @@ pub struct InitCommand { default_value = "mainnet", value_parser = genesis_value_parser )] - chain: ChainSpec, + chain: Arc, } impl InitCommand { /// Execute the `init` command pub async fn execute(&self) -> eyre::Result<()> { - info!(target: "reth::cli", "reth import starting"); + info!(target: "reth::cli", "reth init starting"); info!(target: "reth::cli", path = %self.db, "Opening database"); let db = Arc::new(init_db(&self.db)?); info!(target: "reth::cli", "Database opened"); info!(target: "reth::cli", "Writing genesis block"); - let genesis_hash = init_genesis(db, self.chain.clone())?; - - if genesis_hash != self.chain.genesis_hash() { - // TODO: better error text - return Err(eyre::eyre!( - "Genesis hash mismatch: expected 
{}, got {}", - self.chain.genesis_hash(), - genesis_hash - )) - } + let hash = init_genesis(db, self.chain.clone())?; + info!(target: "reth::cli", hash = ?hash, "Genesis block written"); Ok(()) } } diff --git a/bin/reth/src/cli.rs b/bin/reth/src/cli.rs index e984a0b9371..4afa9093921 100644 --- a/bin/reth/src/cli.rs +++ b/bin/reth/src/cli.rs @@ -38,7 +38,7 @@ pub fn run() -> eyre::Result<()> { } /// Commands to be executed -#[derive(Subcommand)] +#[derive(Debug, Subcommand)] pub enum Commands { /// Start the node #[command(name = "node")] @@ -74,7 +74,7 @@ pub enum Commands { TestVectors(test_vectors::Command), } -#[derive(Parser)] +#[derive(Debug, Parser)] #[command(author, version = "0.1", about = "Reth", long_about = None)] struct Cli { /// The command to run @@ -88,9 +88,10 @@ struct Cli { verbosity: Verbosity, } -#[derive(Args)] +/// The log configuration. +#[derive(Debug, Args)] #[command(next_help_heading = "Logging")] -struct Logs { +pub struct Logs { /// The path to put log files in. #[arg( long = "log.directory", @@ -112,7 +113,7 @@ struct Logs { impl Logs { /// Builds a tracing layer from the current log options. - fn layer(&self) -> (BoxedLayer, Option) + pub fn layer(&self) -> (BoxedLayer, Option) where S: Subscriber, for<'a> S: LookupSpan<'a>, @@ -129,9 +130,10 @@ impl Logs { } } -#[derive(Args)] +/// The verbosity settings for the cli. +#[derive(Debug, Copy, Clone, Args)] #[command(next_help_heading = "Display")] -struct Verbosity { +pub struct Verbosity { /// Set the minimum log level. /// /// -v Errors @@ -150,7 +152,7 @@ struct Verbosity { impl Verbosity { /// Get the corresponding [Directive] for the given verbosity, or none if the verbosity /// corresponds to silent. 
- fn directive(&self) -> Directive { + pub fn directive(&self) -> Directive { if self.quiet { LevelFilter::OFF.into() } else { @@ -166,3 +168,23 @@ impl Verbosity { } } } + +#[cfg(test)] +mod tests { + use super::*; + use clap::CommandFactory; + + /// Tests that the help message is parsed correctly. This ensures that clap args are configured + /// correctly and no conflicts are introduced via attributes that would result in a panic at + /// runtime + #[test] + fn test_parse_help_all_subcommands() { + let reth = Cli::command(); + for sub_command in reth.get_subcommands() { + let err = Cli::try_parse_from(["reth", sub_command.get_name(), "--help"]).unwrap_err(); + // --help is treated as error, but + // > Not a true "error" as it means --help or similar was used. The help message will be sent to stdout. + assert_eq!(err.kind(), clap::error::ErrorKind::DisplayHelp); + } + } +} diff --git a/bin/reth/src/db/mod.rs b/bin/reth/src/db/mod.rs index 6a9ccd48f41..a0830ca13f3 100644 --- a/bin/reth/src/db/mod.rs +++ b/bin/reth/src/db/mod.rs @@ -29,7 +29,7 @@ pub struct Command { /// - Linux: `$XDG_DATA_HOME/reth/db` or `$HOME/.local/share/reth/db` /// - Windows: `{FOLDERID_RoamingAppData}/reth/db` /// - macOS: `$HOME/Library/Application Support/reth/db` - #[arg(long, value_name = "PATH", verbatim_doc_comment, default_value_t)] + #[arg(global = true, long, value_name = "PATH", verbatim_doc_comment, default_value_t)] db: PlatformPath, #[clap(subcommand)] @@ -151,8 +151,10 @@ impl Command { ); return Ok(()); } - let map = tool.list::($start, $len)?; - tui::DbListTUI::::show_tui(map, $start, total_entries) + + tui::DbListTUI::<_, tables::$table>::new(|start, count| { + tool.list::(start, count).unwrap() + }, $start, $len, total_entries).run() })?? 
},)* _ => { @@ -176,7 +178,6 @@ impl Command { BlockTransitionIndex, TxTransitionIndex, SyncStage, - TxHashNumber, Transactions ]); } @@ -206,8 +207,8 @@ impl<'a, DB: Database> DbTool<'a, DB> { let chain = random_block_range(0..len, Default::default(), 0..64); self.db.update(|tx| { - chain.iter().try_for_each(|block| { - insert_canonical_block(tx, block, true)?; + chain.into_iter().try_for_each(|block| { + insert_canonical_block(tx, block, None, true)?; Ok::<_, eyre::Error>(()) }) })??; diff --git a/bin/reth/src/db/tui.rs b/bin/reth/src/db/tui.rs index 1121d1af683..c4ceaf9e868 100644 --- a/bin/reth/src/db/tui.rs +++ b/bin/reth/src/db/tui.rs @@ -19,7 +19,14 @@ use tui::{ }; /// Available keybindings for the [DbListTUI] -static CMDS: [(&str, &str); 3] = [("q", "Quit"), ("up", "Entry Above"), ("down", "Entry Below")]; +static CMDS: [(&str, &str); 6] = [ + ("q", "Quit"), + ("↑", "Entry above"), + ("↓", "Entry below"), + ("←", "Previous page"), + ("→", "Next page"), + ("G", "Go to a specific page"), +]; /// Modified version of the [ListState] struct that exposes the `offset` field. /// Used to make the [DbListTUI] keys clickable. @@ -27,26 +34,62 @@ struct ExpListState { pub(crate) offset: usize, } +#[derive(Default, Eq, PartialEq)] +pub(crate) enum ViewMode { + /// Normal list view mode + #[default] + Normal, + /// Currently wanting to go to a page + GoToPage, +} + #[derive(Default)] -pub(crate) struct DbListTUI { - /// The state of the key list. - pub(crate) state: ListState, +pub(crate) struct DbListTUI +where + F: FnMut(usize, usize) -> BTreeMap, +{ + /// Fetcher for the next page of items. + /// + /// The fetcher is passed the index of the first item to fetch, and the number of items to + /// fetch from that item. + fetch: F, /// The starting index of the key list in the DB. 
- pub(crate) start: usize, + start: usize, + /// The amount of entries to show per page + count: usize, /// The total number of entries in the database - pub(crate) total_entries: usize, + total_entries: usize, + /// The current view mode + mode: ViewMode, + /// The current state of the input buffer + input: String, + /// The state of the key list. + list_state: ListState, /// Entries to show in the TUI. - pub(crate) entries: BTreeMap, + entries: BTreeMap, } -impl DbListTUI { - fn new(entries: BTreeMap, start: usize, total_entries: usize) -> Self { - Self { state: ListState::default(), start, total_entries, entries } +impl DbListTUI +where + F: FnMut(usize, usize) -> BTreeMap, +{ + /// Create a new database list TUI + pub(crate) fn new(fetch: F, start: usize, count: usize, total_entries: usize) -> Self { + Self { + fetch, + start, + count, + total_entries, + mode: ViewMode::Normal, + input: String::new(), + list_state: ListState::default(), + entries: BTreeMap::new(), + } } /// Move to the next list selection fn next(&mut self) { - let i = match self.state.selected() { + let i = match self.list_state.selected() { Some(i) => { if i >= self.entries.len() - 1 { 0 @@ -56,12 +99,12 @@ impl DbListTUI { } None => 0, }; - self.state.select(Some(i)); + self.list_state.select(Some(i)); } /// Move to the previous list selection fn previous(&mut self) { - let i = match self.state.selected() { + let i = match self.list_state.selected() { Some(i) => { if i == 0 { self.entries.len() - 1 @@ -71,89 +114,177 @@ impl DbListTUI { } None => 0, }; - self.state.select(Some(i)); + self.list_state.select(Some(i)); + } + + fn reset(&mut self) { + self.list_state.select(Some(0)); + } + + /// Fetch the next page of items + fn next_page(&mut self) { + if self.start + self.count >= self.total_entries { + return + } + + self.start += self.count; + self.fetch_page(); + } + + /// Fetch the previous page of items + fn previous_page(&mut self) { + if self.start == 0 { + return + } + + self.start -= 
self.count; + self.fetch_page(); + } + + /// Go to a specific page. + fn go_to_page(&mut self, page: usize) { + self.start = (self.count * page).min(self.total_entries - self.count); + self.fetch_page(); + } + + /// Fetch the current page + fn fetch_page(&mut self) { + self.entries = (self.fetch)(self.start, self.count); + self.reset(); } /// Show the [DbListTUI] in the terminal. - pub(crate) fn show_tui( - entries: BTreeMap, - start: usize, - total_entries: usize, - ) -> eyre::Result<()> { - // setup terminal + pub(crate) fn run(mut self) -> eyre::Result<()> { + // Setup backend enable_raw_mode()?; let mut stdout = io::stdout(); execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; - // create app and run it + // Load initial page + self.fetch_page(); + + // Run event loop let tick_rate = Duration::from_millis(250); - let mut app = DbListTUI::::new(entries, start, total_entries); - app.state.select(Some(0)); - let res = run(&mut terminal, app, tick_rate); + let res = event_loop(&mut terminal, &mut self, tick_rate); - // restore terminal + // Restore terminal disable_raw_mode()?; execute!(terminal.backend_mut(), LeaveAlternateScreen, DisableMouseCapture)?; terminal.show_cursor()?; + // Handle errors if let Err(err) = res { error!("{:?}", err) } - Ok(()) } } -fn run( +/// Run the event loop +fn event_loop( terminal: &mut Terminal, - mut app: DbListTUI, + app: &mut DbListTUI, tick_rate: Duration, -) -> io::Result<()> { +) -> io::Result<()> +where + F: FnMut(usize, usize) -> BTreeMap, +{ let mut last_tick = Instant::now(); - loop { - terminal.draw(|f| ui(f, &mut app))?; + let mut running = true; + while running { + // Render + terminal.draw(|f| ui(f, app))?; + // Calculate timeout let timeout = tick_rate.checked_sub(last_tick.elapsed()).unwrap_or_else(|| Duration::from_secs(0)); + + // Poll events if crossterm::event::poll(timeout)? { - match event::read()? 
{ - Event::Key(key) => match key.code { - KeyCode::Char('q') | KeyCode::Char('Q') => return Ok(()), - KeyCode::Down => app.next(), - KeyCode::Up => app.previous(), - _ => {} - }, - Event::Mouse(e) => match e.kind { - MouseEventKind::ScrollDown => app.next(), - MouseEventKind::ScrollUp => app.previous(), - // TODO: This click event can be triggered outside of the list widget. - MouseEventKind::Down(_) => { - // SAFETY: The pointer to the app's state will always be valid for - // reads here, and the source is larger than the destination. - // - // This is technically unsafe, but because the alignment requirements - // in both the source and destination are the same and we can ensure - // that the pointer to `app.state` is valid for reads, this is safe. - let state: ExpListState = unsafe { std::mem::transmute_copy(&app.state) }; - let new_idx = (e.row as usize + state.offset).saturating_sub(1); - if new_idx < app.entries.len() { - app.state.select(Some(new_idx)); - } - } - _ => {} - }, - _ => {} - } + running = !handle_event(app, event::read()?)?; } + if last_tick.elapsed() >= tick_rate { last_tick = Instant::now(); } } + + Ok(()) +} + +/// Handle incoming events +fn handle_event(app: &mut DbListTUI, event: Event) -> io::Result +where + F: FnMut(usize, usize) -> BTreeMap, +{ + if app.mode == ViewMode::GoToPage { + if let Event::Key(key) = event { + match key.code { + KeyCode::Enter => { + let input = std::mem::take(&mut app.input); + if let Ok(page) = input.parse() { + app.go_to_page(page); + } + app.mode = ViewMode::Normal; + } + KeyCode::Char(c) => { + app.input.push(c); + } + KeyCode::Backspace => { + app.input.pop(); + } + KeyCode::Esc => app.mode = ViewMode::Normal, + _ => {} + } + } + + return Ok(false) + } + + match event { + Event::Key(key) => match key.code { + KeyCode::Char('q') | KeyCode::Char('Q') => return Ok(true), + KeyCode::Down => app.next(), + KeyCode::Up => app.previous(), + KeyCode::Right => app.next_page(), + KeyCode::Left => app.previous_page(), 
+ KeyCode::Char('G') => { + app.mode = ViewMode::GoToPage; + } + _ => {} + }, + Event::Mouse(e) => match e.kind { + MouseEventKind::ScrollDown => app.next(), + MouseEventKind::ScrollUp => app.previous(), + // TODO: This click event can be triggered outside of the list widget. + MouseEventKind::Down(_) => { + // SAFETY: The pointer to the app's state will always be valid for + // reads here, and the source is larger than the destination. + // + // This is technically unsafe, but because the alignment requirements + // in both the source and destination are the same and we can ensure + // that the pointer to `app.state` is valid for reads, this is safe. + let state: ExpListState = unsafe { std::mem::transmute_copy(&app.list_state) }; + let new_idx = (e.row as usize + state.offset).saturating_sub(1); + if new_idx < app.entries.len() { + app.list_state.select(Some(new_idx)); + } + } + _ => {} + }, + _ => {} + } + + Ok(false) } -fn ui(f: &mut Frame<'_, B>, app: &mut DbListTUI) { +/// Render the UI +fn ui(f: &mut Frame<'_, B>, app: &mut DbListTUI) +where + F: FnMut(usize, usize) -> BTreeMap, +{ let outer_chunks = Layout::default() .direction(Direction::Vertical) .constraints([Constraint::Percentage(95), Constraint::Percentage(5)].as_ref()) @@ -166,16 +297,19 @@ fn ui(f: &mut Frame<'_, B>, app: &mut DbListTUI) { .constraints([Constraint::Percentage(50), Constraint::Percentage(50)]) .split(outer_chunks[0]); + let key_length = format!("{}", app.start + app.count - 1).len(); let formatted_keys = app .entries .keys() .enumerate() - .map(|(i, k)| ListItem::new(format!("[{}] - {k:?}", i + app.start))) + .map(|(i, k)| { + ListItem::new(format!("[{:0>width$}]: {k:?}", i + app.start, width = key_length)) + }) .collect::>>(); let key_list = List::new(formatted_keys) .block(Block::default().borders(Borders::ALL).title(format!( - "Keys (Showing range [{}, {}] out of {} entries)", + "Keys (Showing entries {}-{} out of {} entries)", app.start, app.start + app.entries.len() - 1, 
app.total_entries @@ -184,13 +318,18 @@ fn ui(f: &mut Frame<'_, B>, app: &mut DbListTUI) { .highlight_style(Style::default().fg(Color::Cyan).add_modifier(Modifier::ITALIC)) .highlight_symbol("➜ ") .start_corner(Corner::TopLeft); - f.render_stateful_widget(key_list, inner_chunks[0], &mut app.state); + f.render_stateful_widget(key_list, inner_chunks[0], &mut app.list_state); + let values = app.entries.values().collect::>(); let value_display = Paragraph::new( - serde_json::to_string_pretty( - &app.entries.values().collect::>()[app.state.selected().unwrap_or(0)], - ) - .unwrap_or_else(|_| String::from("Error serializing value!")), + app.list_state + .selected() + .and_then(|selected| values.get(selected)) + .map(|entry| { + serde_json::to_string_pretty(entry) + .unwrap_or(String::from("Error serializing value")) + }) + .unwrap_or("No value selected".to_string()), ) .block(Block::default().borders(Borders::ALL).title("Value (JSON)")) .wrap(Wrap { trim: false }) @@ -199,11 +338,21 @@ fn ui(f: &mut Frame<'_, B>, app: &mut DbListTUI) { } // Footer - let footer = Paragraph::new( - CMDS.iter().map(|(k, v)| format!("[{k}] {v}")).collect::>().join(" | "), - ) + let footer = match app.mode { + ViewMode::Normal => Paragraph::new( + CMDS.iter().map(|(k, v)| format!("[{k}] {v}")).collect::>().join(" | "), + ), + ViewMode::GoToPage => Paragraph::new(format!( + "Go to page (max {}): {}", + app.total_entries / app.count, + app.input + )), + } .block(Block::default().borders(Borders::ALL)) - .alignment(Alignment::Center) + .alignment(match app.mode { + ViewMode::Normal => Alignment::Center, + ViewMode::GoToPage => Alignment::Left, + }) .style(Style::default().fg(Color::Cyan).add_modifier(Modifier::BOLD)); f.render_widget(footer, outer_chunks[1]); } diff --git a/bin/reth/src/dirs.rs b/bin/reth/src/dirs.rs index b1a8aa575c5..45d61dea89d 100644 --- a/bin/reth/src/dirs.rs +++ b/bin/reth/src/dirs.rs @@ -44,11 +44,18 @@ pub fn logs_dir() -> Option { /// Returns the path to the reth 
jwtsecret directory. /// -/// Refer to [dirs_next::cache_dir] for cross-platform behavior. +/// Refer to [dirs_next::data_dir] for cross-platform behavior. pub fn jwt_secret_dir() -> Option { data_dir().map(|root| root.join("jwtsecret")) } +/// Returns the path to the reth net directory. +/// +/// Refer to [dirs_next::data_dir] +pub fn net_dir() -> Option { + data_dir().map(|root| root.join("net")) +} + /// Returns the path to the reth database. /// /// Refer to [dirs_next::data_dir] for cross-platform behavior. @@ -90,14 +97,14 @@ impl XdgPath for ConfigPath { /// Returns the path to the default reth known peers file. /// -/// Refer to [dirs_next::config_dir] for cross-platform behavior. +/// Refer to [dirs_next::data_dir] for cross-platform behavior. #[derive(Default, Debug, Clone)] #[non_exhaustive] pub struct KnownPeersPath; impl XdgPath for KnownPeersPath { fn resolve() -> Option { - database_path().map(|p| p.join("known-peers.json")) + net_dir().map(|p| p.join("known-peers.json")) } } @@ -116,7 +123,8 @@ impl XdgPath for LogsDir { /// A small helper trait for unit structs that represent a standard path following the XDG /// path specification. -trait XdgPath { +pub trait XdgPath { + /// Resolve the [XdgPath] into a [PathBuf]. 
fn resolve() -> Option; } diff --git a/bin/reth/src/dump_stage/execution.rs b/bin/reth/src/dump_stage/execution.rs index 49d6de7de35..7e164d9eb45 100644 --- a/bin/reth/src/dump_stage/execution.rs +++ b/bin/reth/src/dump_stage/execution.rs @@ -7,9 +7,10 @@ use eyre::Result; use reth_db::{ cursor::DbCursorRO, database::Database, table::TableImporter, tables, transaction::DbTx, }; +use reth_primitives::MAINNET; use reth_provider::Transaction; use reth_stages::{stages::ExecutionStage, Stage, StageId, UnwindInput}; -use std::ops::DerefMut; +use std::{ops::DerefMut, sync::Arc}; use tracing::info; pub(crate) async fn dump_execution_stage( @@ -96,7 +97,10 @@ async fn unwind_and_copy( output_db: &reth_db::mdbx::Env, ) -> eyre::Result<()> { let mut unwind_tx = Transaction::new(db_tool.db)?; - let mut exec_stage = ExecutionStage::default(); + + let mut exec_stage = ExecutionStage::new_default_threshold(reth_executor::Factory::new( + Arc::new(MAINNET.clone()), + )); exec_stage .unwind( @@ -125,7 +129,9 @@ async fn dry_run( info!(target: "reth::cli", "Executing stage. 
[dry-run]"); let mut tx = Transaction::new(&output_db)?; - let mut exec_stage = ExecutionStage::default(); + let mut exec_stage = ExecutionStage::new_default_threshold(reth_executor::Factory::new( + Arc::new(MAINNET.clone()), + )); exec_stage .execute( diff --git a/bin/reth/src/dump_stage/hashing_account.rs b/bin/reth/src/dump_stage/hashing_account.rs index e5f6ccf0197..5f43ce61abd 100644 --- a/bin/reth/src/dump_stage/hashing_account.rs +++ b/bin/reth/src/dump_stage/hashing_account.rs @@ -36,8 +36,7 @@ pub(crate) async fn dump_hashing_account_stage( unwind_and_copy::(db_tool, from, tip_block_number, &output_db).await?; if should_run { - println!("\n# AccountHashing stage does not support dry run, so it will actually be committing changes."); - run(output_db, to, from).await?; + dry_run(output_db, to, from).await?; } Ok(()) @@ -69,7 +68,7 @@ async fn unwind_and_copy( } /// Try to re-execute the stage straightaway -async fn run( +async fn dry_run( output_db: reth_db::mdbx::Env, to: u64, from: u64, @@ -82,15 +81,21 @@ async fn run( ..Default::default() }; - exec_stage - .execute( - &mut tx, - reth_stages::ExecInput { - previous_stage: Some((StageId("Another"), to)), - stage_progress: Some(from), - }, - ) - .await?; + let mut exec_output = false; + while !exec_output { + exec_output = exec_stage + .execute( + &mut tx, + reth_stages::ExecInput { + previous_stage: Some((StageId("Another"), to)), + stage_progress: Some(from), + }, + ) + .await? 
+ .done; + } + + tx.drop()?; info!(target: "reth::cli", "Success."); diff --git a/bin/reth/src/dump_stage/hashing_storage.rs b/bin/reth/src/dump_stage/hashing_storage.rs index 557c6a956d7..03cbb74a3f9 100644 --- a/bin/reth/src/dump_stage/hashing_storage.rs +++ b/bin/reth/src/dump_stage/hashing_storage.rs @@ -6,8 +6,9 @@ use crate::{ use eyre::Result; use reth_db::{database::Database, table::TableImporter, tables}; use reth_provider::Transaction; -use reth_stages::{stages::StorageHashingStage, Stage, UnwindInput}; +use reth_stages::{stages::StorageHashingStage, Stage, StageId, UnwindInput}; use std::ops::DerefMut; +use tracing::info; pub(crate) async fn dump_hashing_storage_stage( db_tool: &mut DbTool<'_, DB>, @@ -16,14 +17,14 @@ pub(crate) async fn dump_hashing_storage_stage( output_db: &PlatformPath, should_run: bool, ) -> Result<()> { - if should_run { - eyre::bail!("StorageHashing stage does not support dry run.") - } - let (output_db, tip_block_number) = setup::(from, to, output_db, db_tool)?; unwind_and_copy::(db_tool, from, tip_block_number, &output_db).await?; + if should_run { + dry_run(output_db, to, from).await?; + } + Ok(()) } @@ -53,3 +54,38 @@ async fn unwind_and_copy( Ok(()) } + +/// Try to re-execute the stage straightaway +async fn dry_run( + output_db: reth_db::mdbx::Env, + to: u64, + from: u64, +) -> eyre::Result<()> { + info!(target: "reth::cli", "Executing stage."); + + let mut tx = Transaction::new(&output_db)?; + let mut exec_stage = StorageHashingStage { + clean_threshold: 1, // Forces hashing from scratch + ..Default::default() + }; + + let mut exec_output = false; + while !exec_output { + exec_output = exec_stage + .execute( + &mut tx, + reth_stages::ExecInput { + previous_stage: Some((StageId("Another"), to)), + stage_progress: Some(from), + }, + ) + .await? 
+ .done; + } + + tx.drop()?; + + info!(target: "reth::cli", "Success."); + + Ok(()) +} diff --git a/bin/reth/src/dump_stage/merkle.rs b/bin/reth/src/dump_stage/merkle.rs new file mode 100644 index 00000000000..6e6ce370ff3 --- /dev/null +++ b/bin/reth/src/dump_stage/merkle.rs @@ -0,0 +1,143 @@ +use crate::{ + db::DbTool, + dirs::{DbPath, PlatformPath}, + dump_stage::setup, +}; +use eyre::Result; +use reth_db::{database::Database, table::TableImporter, tables, transaction::DbTx}; +use reth_primitives::MAINNET; +use reth_provider::Transaction; +use reth_stages::{ + stages::{AccountHashingStage, ExecutionStage, MerkleStage, StorageHashingStage}, + Stage, StageId, UnwindInput, +}; +use std::{ops::DerefMut, sync::Arc}; +use tracing::info; + +pub(crate) async fn dump_merkle_stage( + db_tool: &mut DbTool<'_, DB>, + from: u64, + to: u64, + output_db: &PlatformPath, + should_run: bool, +) -> Result<()> { + let (output_db, tip_block_number) = setup::(from, to, output_db, db_tool)?; + + output_db.update(|tx| { + tx.import_table_with_range::(&db_tool.db.tx()?, Some(from), to) + })??; + + let tx = db_tool.db.tx()?; + let from_transition_rev = + tx.get::(from)?.expect("there should be at least one."); + let to_transition_rev = + tx.get::(to)?.expect("there should be at least one."); + + output_db.update(|tx| { + tx.import_table_with_range::( + &db_tool.db.tx()?, + Some(from_transition_rev), + to_transition_rev, + ) + })??; + + unwind_and_copy::(db_tool, (from, to), tip_block_number, &output_db).await?; + + if should_run { + dry_run(output_db, to, from).await?; + } + + Ok(()) +} + +/// Dry-run an unwind to FROM block and copy the necessary table data to the new database. 
+async fn unwind_and_copy( + db_tool: &mut DbTool<'_, DB>, + range: (u64, u64), + tip_block_number: u64, + output_db: &reth_db::mdbx::Env, +) -> eyre::Result<()> { + let (from, to) = range; + let mut unwind_tx = Transaction::new(db_tool.db)?; + let unwind = UnwindInput { unwind_to: from, stage_progress: tip_block_number, bad_block: None }; + let execute_input = reth_stages::ExecInput { + previous_stage: Some((StageId("Another"), to)), + stage_progress: Some(from), + }; + + // Unwind hashes all the way to FROM + StorageHashingStage::default().unwind(&mut unwind_tx, unwind).await.unwrap(); + AccountHashingStage::default().unwind(&mut unwind_tx, unwind).await.unwrap(); + + MerkleStage::default_unwind().unwind(&mut unwind_tx, unwind).await?; + + // Bring Plainstate to TO (hashing stage execution requires it) + let mut exec_stage = ExecutionStage::new_default_threshold(reth_executor::Factory::new( + Arc::new(MAINNET.clone()), + )); + + exec_stage.commit_threshold = u64::MAX; + exec_stage + .unwind( + &mut unwind_tx, + UnwindInput { unwind_to: to, stage_progress: tip_block_number, bad_block: None }, + ) + .await?; + + // Bring hashes to TO + AccountHashingStage { clean_threshold: u64::MAX, commit_threshold: u64::MAX } + .execute(&mut unwind_tx, execute_input) + .await + .unwrap(); + StorageHashingStage { clean_threshold: u64::MAX, commit_threshold: u64::MAX } + .execute(&mut unwind_tx, execute_input) + .await + .unwrap(); + + let unwind_inner_tx = unwind_tx.deref_mut(); + + // TODO optimize we can actually just get the entries we need + output_db.update(|tx| tx.import_dupsort::(unwind_inner_tx))??; + + output_db.update(|tx| tx.import_table::(unwind_inner_tx))??; + output_db.update(|tx| tx.import_dupsort::(unwind_inner_tx))??; + output_db.update(|tx| tx.import_table::(unwind_inner_tx))??; + output_db.update(|tx| tx.import_dupsort::(unwind_inner_tx))??; + + unwind_tx.drop()?; + + Ok(()) +} + +/// Try to re-execute the stage straightaway +async fn dry_run( + output_db: 
reth_db::mdbx::Env, + to: u64, + from: u64, +) -> eyre::Result<()> { + info!(target: "reth::cli", "Executing stage."); + + let mut tx = Transaction::new(&output_db)?; + let mut exec_output = false; + while !exec_output { + exec_output = MerkleStage::Execution { + clean_threshold: u64::MAX, /* Forces updating the root instead of calculating from + * scratch */ + } + .execute( + &mut tx, + reth_stages::ExecInput { + previous_stage: Some((StageId("Another"), to)), + stage_progress: Some(from), + }, + ) + .await? + .done; + } + + tx.drop()?; + + info!(target: "reth::cli", "Success."); + + Ok(()) +} diff --git a/bin/reth/src/dump_stage/mod.rs b/bin/reth/src/dump_stage/mod.rs index 0bc3863a342..4eddb3817da 100644 --- a/bin/reth/src/dump_stage/mod.rs +++ b/bin/reth/src/dump_stage/mod.rs @@ -8,6 +8,9 @@ use hashing_account::dump_hashing_account_stage; mod execution; use execution::dump_execution_stage; +mod merkle; +use merkle::dump_merkle_stage; + use crate::{ db::DbTool, dirs::{DbPath, PlatformPath}, @@ -45,6 +48,8 @@ pub enum Stages { StorageHashing(StageCommand), /// AccountHashing stage. AccountHashing(StageCommand), + /// Merkle stage. + Merkle(StageCommand), } /// Stage command that takes a range @@ -94,6 +99,9 @@ impl Command { Stages::AccountHashing(StageCommand { output_db, from, to, dry_run, .. }) => { dump_hashing_account_stage(&mut tool, *from, *to, output_db, *dry_run).await? } + Stages::Merkle(StageCommand { output_db, from, to, dry_run, .. }) => { + dump_merkle_stage(&mut tool, *from, *to, output_db, *dry_run).await? 
+ } } Ok(()) diff --git a/bin/reth/src/lib.rs b/bin/reth/src/lib.rs index d22105b0506..36ebcf6324a 100644 --- a/bin/reth/src/lib.rs +++ b/bin/reth/src/lib.rs @@ -1,4 +1,4 @@ -#![warn(missing_docs, unreachable_pub)] +#![warn(missing_docs, unreachable_pub, unused_crate_dependencies)] #![deny(unused_must_use, rust_2018_idioms)] #![doc(test( no_crate_inject, diff --git a/bin/reth/src/node/mod.rs b/bin/reth/src/node/mod.rs index 55a4a9ef368..4a3e426a96f 100644 --- a/bin/reth/src/node/mod.rs +++ b/bin/reth/src/node/mod.rs @@ -13,13 +13,14 @@ use events::NodeEvent; use eyre::Context; use fdlimit::raise_fd_limit; use futures::{pin_mut, stream::select as stream_select, Stream, StreamExt}; -use reth_consensus::beacon::BeaconConsensus; +use reth_beacon_consensus::BeaconConsensus; use reth_db::{ database::Database, mdbx::{Env, WriteMap}, tables, transaction::DbTx, }; +use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, @@ -36,7 +37,7 @@ use reth_network::{ error::NetworkError, FetchClient, NetworkConfig, NetworkHandle, NetworkManager, }; use reth_network_api::NetworkInfo; -use reth_primitives::{BlockHashOrNumber, ChainSpec, Head, H256}; +use reth_primitives::{BlockHashOrNumber, ChainSpec, Head, Header, SealedHeader, H256}; use reth_provider::{BlockProvider, HeaderProvider, ShareableDatabase}; use reth_rpc_engine_api::{EngineApi, EngineApiHandle}; use reth_staged_sync::{ @@ -49,10 +50,14 @@ use reth_staged_sync::{ }; use reth_stages::{ prelude::*, - stages::{ExecutionStage, SenderRecoveryStage, TotalDifficultyStage, FINISH}, + stages::{ExecutionStage, HeaderSyncMode, SenderRecoveryStage, TotalDifficultyStage, FINISH}, }; use reth_tasks::TaskExecutor; -use std::{net::SocketAddr, path::PathBuf, sync::Arc}; +use std::{ + net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + path::PathBuf, + sync::Arc, +}; use tokio::sync::{mpsc::unbounded_channel, watch}; use tracing::*; @@ 
-90,7 +95,7 @@ pub struct Command { default_value = "mainnet", value_parser = genesis_value_parser )] - chain: ChainSpec, + chain: Arc, /// Enable Prometheus metrics. /// @@ -101,6 +106,12 @@ pub struct Command { #[clap(flatten)] network: NetworkArgs, + /// Prompt the downloader to download blocks one at a time. + /// + /// NOTE: This is for testing purposes only. + #[arg(long = "debug.continuous", help_heading = "Debug")] + continuous: bool, + /// Set the chain tip manually for testing purposes. /// /// NOTE: This is a temporary flag @@ -143,7 +154,7 @@ impl Command { init_genesis(db.clone(), self.chain.clone())?; - let (consensus, forkchoice_state_tx) = self.init_consensus()?; + let consensus = Arc::new(BeaconConsensus::new(self.chain.clone())) as Arc; info!(target: "reth::cli", "Consensus engine initialized"); self.init_trusted_nodes(&mut config); @@ -159,12 +170,22 @@ impl Command { let _rpc_server = self .rpc - .start_rpc_server(shareable_db.clone(), test_transaction_pool.clone(), network.clone()) + .start_rpc_server( + shareable_db.clone(), + test_transaction_pool.clone(), + network.clone(), + ctx.task_executor.clone(), + ) .await?; info!(target: "reth::cli", "Started RPC server"); - let engine_api_handle = - self.init_engine_api(Arc::clone(&db), forkchoice_state_tx, &ctx.task_executor); + if self.continuous { + info!(target: "reth::cli", "Continuous sync mode enabled"); + } + + // TODO: This will be fixed with the sync controller (https://github.com/paradigmxyz/reth/pull/1662) + let (tx, _rx) = watch::channel(ForkchoiceState::default()); + let engine_api_handle = self.init_engine_api(Arc::clone(&db), tx, &ctx.task_executor); info!(target: "reth::cli", "Engine API handler initialized"); let _auth_server = self @@ -173,6 +194,7 @@ impl Command { shareable_db, test_transaction_pool, network.clone(), + ctx.task_executor.clone(), engine_api_handle, ) .await?; @@ -188,6 +210,16 @@ impl Command { ) .await?; + if let Some(tip) = self.tip { + pipeline.set_tip(tip); + 
debug!(target: "reth::cli", %tip, "Tip manually set"); + } else { + let warn_msg = "No tip specified. \ + reth cannot communicate with consensus clients, \ + so a tip must manually be provided for the online stages with --debug.tip ."; + warn!(target: "reth::cli", warn_msg); + } + ctx.task_executor.spawn(events::handle_events(Some(network.clone()), events)); // Run pipeline @@ -248,6 +280,7 @@ impl Command { network.clone(), consensus, max_block, + self.continuous, ) .await?; @@ -282,26 +315,6 @@ impl Command { } } - fn init_consensus(&self) -> eyre::Result<(Arc, watch::Sender)> { - let (consensus, notifier) = BeaconConsensus::builder().build(self.chain.clone()); - - if let Some(tip) = self.tip { - debug!(target: "reth::cli", %tip, "Tip manually set"); - notifier.send(ForkchoiceState { - head_block_hash: tip, - safe_block_hash: tip, - finalized_block_hash: tip, - })?; - } else { - let warn_msg = "No tip specified. \ - reth cannot communicate with consensus clients, \ - so a tip must manually be provided for the online stages with --debug.tip ."; - warn!(target: "reth::cli", warn_msg); - } - - Ok((consensus, notifier)) - } - fn init_engine_api( &self, db: Arc>, @@ -315,7 +328,7 @@ impl Command { message_rx, forkchoice_state_tx, ); - task_executor.spawn(engine_api); + task_executor.spawn_critical("engine API task", engine_api); message_tx } @@ -336,11 +349,11 @@ impl Command { NetworkManager::builder(config).await?.request_handler(client).split_with_handle(); let known_peers_file = self.network.persistent_peers_file(); - task_executor.spawn_critical_with_signal("p2p network task", |shutdown| async move { - run_network_until_shutdown(shutdown, network, known_peers_file).await + task_executor.spawn_critical_with_signal("p2p network task", |shutdown| { + run_network_until_shutdown(shutdown, network, known_peers_file) }); - task_executor.spawn_critical("p2p eth request handler", async move { eth.await }); + task_executor.spawn_critical("p2p eth request handler", eth); // 
TODO spawn pool @@ -380,17 +393,38 @@ impl Command { fetch_client: FetchClient, tip: H256, ) -> Result { - if let Some(number) = db.view(|tx| tx.get::(tip))?? { - info!(target: "reth::cli", ?tip, number, "Successfully looked up tip block number in the database"); - return Ok(number) + Ok(self.fetch_tip(db, fetch_client, BlockHashOrNumber::Hash(tip)).await?.number) + } + + /// Attempt to look up the block with the given number and return the header. + /// + /// NOTE: The download is attempted with infinite retries. + async fn fetch_tip( + &self, + db: Arc>, + fetch_client: FetchClient, + tip: BlockHashOrNumber, + ) -> Result { + let header = db.view(|tx| -> Result, reth_db::Error> { + let number = match tip { + BlockHashOrNumber::Hash(hash) => tx.get::(hash)?, + BlockHashOrNumber::Number(number) => Some(number), + }; + Ok(number.map(|number| tx.get::(number)).transpose()?.flatten()) + })??; + + // try to look up the header in the database + if let Some(header) = header { + info!(target: "reth::cli", ?tip, "Successfully looked up tip block in the database"); + return Ok(header.seal_slow()) } - info!(target: "reth::cli", ?tip, "Fetching tip block number from the network."); + info!(target: "reth::cli", ?tip, "Fetching tip block from the network."); loop { - match get_single_header(fetch_client.clone(), BlockHashOrNumber::Hash(tip)).await { + match get_single_header(fetch_client.clone(), tip).await { Ok(tip_header) => { - info!(target: "reth::cli", ?tip, number = tip_header.number, "Successfully fetched tip block number"); - return Ok(tip_header.number) + info!(target: "reth::cli", ?tip, "Successfully fetched tip"); + return Ok(tip_header) } Err(error) => { error!(target: "reth::cli", %error, "Failed to fetch the tip. 
Retrying..."); @@ -411,9 +445,14 @@ impl Command { .network_config(config, self.chain.clone()) .with_task_executor(Box::new(executor)) .set_head(head) + .listener_addr(SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::UNSPECIFIED, + self.network.discovery.port.unwrap_or(DEFAULT_DISCOVERY_PORT), + ))) .build(ShareableDatabase::new(db, self.chain.clone())) } + #[allow(clippy::too_many_arguments)] async fn build_pipeline( &self, config: &Config, @@ -422,6 +461,7 @@ impl Command { updater: U, consensus: &Arc, max_block: Option, + continuous: bool, ) -> eyre::Result, U>> where H: HeaderDownloader + 'static, @@ -437,21 +477,31 @@ impl Command { builder = builder.with_max_block(max_block) } + let (tip_tx, tip_rx) = watch::channel(H256::zero()); + let factory = reth_executor::Factory::new(self.chain.clone()); + + let header_mode = + if continuous { HeaderSyncMode::Continuous } else { HeaderSyncMode::Tip(tip_rx) }; let pipeline = builder .with_sync_state_updater(updater.clone()) + .with_tip_sender(tip_tx) .add_stages( - DefaultStages::new(consensus.clone(), header_downloader, body_downloader, updater) - .set(TotalDifficultyStage { - chain_spec: self.chain.clone(), - commit_threshold: stage_conf.total_difficulty.commit_threshold, - }) - .set(SenderRecoveryStage { - commit_threshold: stage_conf.sender_recovery.commit_threshold, - }) - .set(ExecutionStage { - chain_spec: self.chain.clone(), - commit_threshold: stage_conf.execution.commit_threshold, - }), + DefaultStages::new( + header_mode, + consensus.clone(), + header_downloader, + body_downloader, + updater, + factory.clone(), + ) + .set( + TotalDifficultyStage::new(consensus.clone()) + .with_commit_threshold(stage_conf.total_difficulty.commit_threshold), + ) + .set(SenderRecoveryStage { + commit_threshold: stage_conf.sender_recovery.commit_threshold, + }) + .set(ExecutionStage::new(factory, stage_conf.execution.commit_threshold)), ) .build(); @@ -479,7 +529,8 @@ async fn run_network_until_shutdown( let known_peers = 
network.all_peers().collect::>(); if let Ok(known_peers) = serde_json::to_string_pretty(&known_peers) { trace!(target : "reth::cli", peers_file =?file_path, num_peers=%known_peers.len(), "Saving current peers"); - match std::fs::write(&file_path, known_peers) { + let parent_dir = file_path.parent().map(std::fs::create_dir_all).transpose(); + match parent_dir.and_then(|_| std::fs::write(&file_path, known_peers)) { Ok(_) => { info!(target: "reth::cli", peers_file=?file_path, "Wrote network peers to file"); } @@ -495,6 +546,12 @@ async fn run_network_until_shutdown( mod tests { use super::*; + #[test] + fn parse_help_node_command() { + let err = Command::try_parse_from(["reth", "--help"]).unwrap_err(); + assert_eq!(err.kind(), clap::error::ErrorKind::DisplayHelp); + } + #[test] fn parse_common_node_command_chain_args() { for chain in ["mainnet", "sepolia", "goerli"] { diff --git a/bin/reth/src/p2p/mod.rs b/bin/reth/src/p2p/mod.rs index d8b27777ba5..44960d76c66 100644 --- a/bin/reth/src/p2p/mod.rs +++ b/bin/reth/src/p2p/mod.rs @@ -39,7 +39,7 @@ pub struct Command { default_value = "mainnet", value_parser = chain_spec_value_parser )] - chain: ChainSpec, + chain: Arc, /// Disable the discovery service. #[command(flatten)] @@ -137,7 +137,7 @@ impl Command { }; let (_, result) = (move || { let client = fetch_client.clone(); - async move { client.get_block_bodies(vec![hash]).await } + client.get_block_bodies(vec![hash]) }) .retry(&backoff) .notify(|err, _| println!("Error requesting block: {err}. Retrying...")) diff --git a/bin/reth/src/runner.rs b/bin/reth/src/runner.rs index 84afc3847f0..d0a171f33df 100644 --- a/bin/reth/src/runner.rs +++ b/bin/reth/src/runner.rs @@ -37,12 +37,17 @@ impl CliRunner { // fires the shutdown signal to all tasks spawned via the task executor drop(task_manager); + // drop the tokio runtime on a separate thread because drop blocks until its pools + // (including blocking pool) are shutdown. 
In other words `drop(tokio_runtime)` would block + // the current thread but we want to exit right away. + std::thread::spawn(move || drop(tokio_runtime)); + // give all tasks that are now being shut down some time to finish before tokio leaks them // see [Runtime::shutdown_timeout](tokio::runtime::Runtime::shutdown_timeout) // TODO: enable this again, when pipeline/stages are not longer blocking tasks - std::process::exit(0); // warn!(target: "reth::cli", "Received shutdown signal, waiting up to 30 seconds for // tasks."); tokio_runtime.shutdown_timeout(Duration::from_secs(30)); + Ok(()) } /// Executes a regular future until completion or until external signal received. diff --git a/bin/reth/src/stage/mod.rs b/bin/reth/src/stage/mod.rs index 8153567a4fd..b57bb7f4544 100644 --- a/bin/reth/src/stage/mod.rs +++ b/bin/reth/src/stage/mod.rs @@ -7,7 +7,7 @@ use crate::{ prometheus_exporter, }; use clap::{Parser, ValueEnum}; -use reth_consensus::beacon::BeaconConsensus; +use reth_beacon_consensus::BeaconConsensus; use reth_downloaders::bodies::bodies::BodiesDownloaderBuilder; use reth_primitives::ChainSpec; use reth_provider::{ShareableDatabase, Transaction}; @@ -54,7 +54,7 @@ pub struct Command { default_value = "mainnet", value_parser = chain_spec_value_parser )] - chain: ChainSpec, + chain: Arc, /// Enable Prometheus metrics. 
/// @@ -123,7 +123,7 @@ impl Command { match self.stage { StageEnum::Bodies => { - let (consensus, _) = BeaconConsensus::builder().build(self.chain.clone()); + let consensus = Arc::new(BeaconConsensus::new(self.chain.clone())); let mut config = config; config.peers.connect_trusted_nodes_only = self.network.trusted_only; @@ -171,8 +171,9 @@ impl Command { stage.execute(&mut tx, input).await?; } StageEnum::Execution => { - let mut stage = - ExecutionStage { chain_spec: self.chain.clone(), commit_threshold: num_blocks }; + let factory = reth_executor::Factory::new(self.chain.clone()); + let mut stage = ExecutionStage::new(factory, 10_000); + stage.commit_threshold = num_blocks; if !self.skip_unwind { stage.unwind(&mut tx, unwind).await?; } diff --git a/bin/reth/src/test_eth_chain/runner.rs b/bin/reth/src/test_eth_chain/runner.rs index 45c1225bf26..277c22d74bf 100644 --- a/bin/reth/src/test_eth_chain/runner.rs +++ b/bin/reth/src/test_eth_chain/runner.rs @@ -10,8 +10,8 @@ use reth_db::{ Error as DbError, }; use reth_primitives::{ - keccak256, Account as RethAccount, Address, ChainSpec, ForkCondition, Hardfork, JsonU256, - SealedBlock, SealedHeader, StorageEntry, H256, U256, + keccak256, Account as RethAccount, Address, Bytecode, ChainSpec, ForkCondition, Hardfork, + JsonU256, SealedBlock, SealedHeader, StorageEntry, H256, U256, }; use reth_provider::Transaction; use reth_rlp::Decodable; @@ -20,6 +20,7 @@ use std::{ collections::HashMap, ffi::OsStr, path::{Path, PathBuf}, + sync::Arc, }; use tracing::{debug, trace}; @@ -139,19 +140,19 @@ pub async fn run_test(path: PathBuf) -> eyre::Result { // insert genesis let header: SealedHeader = suite.genesis_block_header.into(); let genesis_block = SealedBlock { header, body: vec![], ommers: vec![], withdrawals: None }; - reth_provider::insert_canonical_block(&tx, &genesis_block, has_block_reward)?; + reth_provider::insert_canonical_block(&tx, genesis_block, None, has_block_reward)?; let mut last_block = None; 
suite.blocks.iter().try_for_each(|block| -> eyre::Result<()> { let decoded = SealedBlock::decode(&mut block.rlp.as_ref())?; - reth_provider::insert_canonical_block(&tx, &decoded, has_block_reward)?; last_block = Some(decoded.number); + reth_provider::insert_canonical_block(&tx, decoded, None, has_block_reward)?; Ok(()) })?; pre_state.into_iter().try_for_each(|(address, account)| -> eyre::Result<()> { let has_code = !account.code.is_empty(); - let code_hash = if has_code { Some(keccak256(&account.code)) } else { None }; + let code_hash = has_code.then(|| keccak256(&account.code)); tx.put::( address, RethAccount { @@ -161,7 +162,7 @@ pub async fn run_test(path: PathBuf) -> eyre::Result { }, )?; if let Some(code_hash) = code_hash { - tx.put::(code_hash, account.code.to_vec())?; + tx.put::(code_hash, Bytecode::new_raw(account.code.0))?; } account.storage.iter().try_for_each(|(k, v)| { trace!(target: "reth::cli", ?address, key = ?k.0, value = ?v.0, "Update storage"); @@ -193,7 +194,8 @@ pub async fn run_test(path: PathBuf) -> eyre::Result { // Initialize the execution stage // Hardcode the chain_id to Ethereum 1. 
- let mut stage = ExecutionStage::new(chain_spec, 1000); + let factory = reth_executor::Factory::new(Arc::new(chain_spec)); + let mut stage = ExecutionStage::new(factory, 1_000); // Call execution stage let input = ExecInput { diff --git a/crates/consensus/Cargo.toml b/crates/consensus/Cargo.toml index bd742d0bed0..fc76b357ab2 100644 --- a/crates/consensus/Cargo.toml +++ b/crates/consensus/Cargo.toml @@ -21,4 +21,4 @@ reth-provider = { path = "../storage/provider", features = ["test-utils"] } assert_matches = "1.5.0" [features] -optimism = [] \ No newline at end of file +optimism = ["reth-primitives/optimism"] diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml new file mode 100644 index 00000000000..1ff30c99f1f --- /dev/null +++ b/crates/consensus/beacon/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "reth-beacon-consensus" +version = "0.1.0" +edition = "2021" +license = "MIT OR Apache-2.0" +repository = "https://github.com/paradigmxyz/reth" +readme = "README.md" + +[dependencies] +# reth +reth-consensus-common = { path = "../common" } +reth-primitives = { path = "../../primitives" } +reth-interfaces = { path = "../../interfaces" } + +[dev-dependencies] +reth-interfaces = { path = "../../interfaces", features = ["test-utils"] } diff --git a/crates/consensus/src/beacon/beacon_consensus.rs b/crates/consensus/beacon/src/beacon_consensus.rs similarity index 62% rename from crates/consensus/src/beacon/beacon_consensus.rs rename to crates/consensus/beacon/src/beacon_consensus.rs index 779073d5031..2018d320252 100644 --- a/crates/consensus/src/beacon/beacon_consensus.rs +++ b/crates/consensus/beacon/src/beacon_consensus.rs @@ -1,69 +1,56 @@ //! 
Consensus for ethereum network -use crate::validation; -use reth_interfaces::consensus::{Consensus, Error, ForkchoiceState}; +use reth_consensus_common::validation; +use reth_interfaces::consensus::{Consensus, ConsensusError}; use reth_primitives::{ChainSpec, Hardfork, SealedBlock, SealedHeader, EMPTY_OMMER_ROOT, U256}; -use tokio::sync::watch; - -use super::BeaconConsensusBuilder; +use std::sync::Arc; /// Ethereum beacon consensus /// -/// This consensus engine does basic checks as outlined in the execution specs, -/// but otherwise defers consensus on what the current chain is to a consensus client. +/// This consensus engine does basic checks as outlined in the execution specs. #[derive(Debug)] pub struct BeaconConsensus { - /// Watcher over the forkchoice state - forkchoice_state_rx: watch::Receiver, /// Configuration - chain_spec: ChainSpec, + chain_spec: Arc, } impl BeaconConsensus { /// Create a new instance of [BeaconConsensus] - pub fn new( - chain_spec: ChainSpec, - forkchoice_state_rx: watch::Receiver, - ) -> Self { - Self { chain_spec, forkchoice_state_rx } - } - - /// Create new [BeaconConsensusBuilder]. 
- pub fn builder() -> BeaconConsensusBuilder { - BeaconConsensusBuilder::default() + pub fn new(chain_spec: Arc) -> Self { + Self { chain_spec } } } impl Consensus for BeaconConsensus { - fn fork_choice_state(&self) -> watch::Receiver { - self.forkchoice_state_rx.clone() - } - fn pre_validate_header( &self, header: &SealedHeader, parent: &SealedHeader, - ) -> Result<(), Error> { + ) -> Result<(), ConsensusError> { validation::validate_header_standalone(header, &self.chain_spec)?; validation::validate_header_regarding_parent(parent, header, &self.chain_spec)?; Ok(()) } - fn validate_header(&self, header: &SealedHeader, total_difficulty: U256) -> Result<(), Error> { + fn validate_header( + &self, + header: &SealedHeader, + total_difficulty: U256, + ) -> Result<(), ConsensusError> { if self.chain_spec.fork(Hardfork::Paris).active_at_ttd(total_difficulty, header.difficulty) { // EIP-3675: Upgrade consensus to Proof-of-Stake: // https://eips.ethereum.org/EIPS/eip-3675#replacing-difficulty-with-0 if header.difficulty != U256::ZERO { - return Err(Error::TheMergeDifficultyIsNotZero) + return Err(ConsensusError::TheMergeDifficultyIsNotZero) } if header.nonce != 0 { - return Err(Error::TheMergeNonceIsNotZero) + return Err(ConsensusError::TheMergeNonceIsNotZero) } if header.ommers_hash != EMPTY_OMMER_ROOT { - return Err(Error::TheMergeOmmerRootIsNotEmpty) + return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty) } // mixHash is used instead of difficulty inside EVM @@ -77,7 +64,7 @@ impl Consensus for BeaconConsensus { Ok(()) } - fn pre_validate_block(&self, block: &SealedBlock) -> Result<(), Error> { + fn pre_validate_block(&self, block: &SealedBlock) -> Result<(), ConsensusError> { validation::validate_block_standalone(block, &self.chain_spec) } @@ -88,15 +75,15 @@ impl Consensus for BeaconConsensus { #[cfg(test)] mod test { + use super::BeaconConsensus; use reth_interfaces::consensus::Consensus; use reth_primitives::{ChainSpecBuilder, U256}; - - use super::BeaconConsensus; 
+ use std::sync::Arc; #[test] fn test_has_block_reward_before_paris() { - let chain_spec = ChainSpecBuilder::mainnet().build(); - let (consensus, _) = BeaconConsensus::builder().build(chain_spec); + let chain_spec = Arc::new(ChainSpecBuilder::mainnet().build()); + let consensus = BeaconConsensus::new(chain_spec); assert!(consensus.has_block_reward(U256::ZERO, U256::ZERO)); } } diff --git a/crates/consensus/beacon/src/lib.rs b/crates/consensus/beacon/src/lib.rs new file mode 100644 index 00000000000..04d5af32876 --- /dev/null +++ b/crates/consensus/beacon/src/lib.rs @@ -0,0 +1,10 @@ +#![warn(missing_docs, unreachable_pub, unused_crate_dependencies)] +#![deny(unused_must_use, rust_2018_idioms)] +#![doc(test( + no_crate_inject, + attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables)) +))] +//! Beacon consensus implementation. + +mod beacon_consensus; +pub use beacon_consensus::BeaconConsensus; diff --git a/crates/consensus/common/Cargo.toml b/crates/consensus/common/Cargo.toml new file mode 100644 index 00000000000..f1f79e5d70a --- /dev/null +++ b/crates/consensus/common/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "reth-consensus-common" +version = "0.1.0" +edition = "2021" +license = "MIT OR Apache-2.0" +repository = "https://github.com/paradigmxyz/reth" +readme = "README.md" + +[dependencies] +# reth +reth-primitives = { path = "../../primitives" } +reth-interfaces = { path = "../../interfaces" } +reth-provider = { path = "../../storage/provider" } + + +[dev-dependencies] +reth-interfaces = { path = "../../interfaces", features = ["test-utils"] } +reth-provider = { path = "../../storage/provider", features = ["test-utils"] } +assert_matches = "1.5.0" +mockall = "0.11.3" + +[features] +optimism = ["reth-primitives/optimism"] diff --git a/crates/consensus/src/lib.rs b/crates/consensus/common/src/lib.rs similarity index 76% rename from crates/consensus/src/lib.rs rename to crates/consensus/common/src/lib.rs index 35bf33b0334..4b1917d612d 100644 
--- a/crates/consensus/src/lib.rs +++ b/crates/consensus/common/src/lib.rs @@ -4,10 +4,8 @@ no_crate_inject, attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables)) ))] -//! Consensus algorithms for Ethereum. -/// Beacon consensus implementation. -pub mod beacon; +//! Commonly used consensus methods. /// Collection of consensus validation methods. pub mod validation; diff --git a/crates/consensus/src/validation.rs b/crates/consensus/common/src/validation.rs similarity index 73% rename from crates/consensus/src/validation.rs rename to crates/consensus/common/src/validation.rs index 46363405bac..3e3eee7fa0e 100644 --- a/crates/consensus/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -1,17 +1,15 @@ //! Collection of methods for block validation. -use reth_interfaces::{consensus::Error, Result as RethResult}; +use reth_interfaces::{consensus::ConsensusError, Result as RethResult}; use reth_primitives::{ - BlockNumber, ChainSpec, Hardfork, Header, SealedBlock, SealedHeader, Transaction, - TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxLegacy, + constants, BlockNumber, ChainSpec, Hardfork, Header, InvalidTransactionError, SealedBlock, + SealedHeader, Transaction, TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxLegacy, }; -use reth_provider::{AccountProvider, HeaderProvider}; +use reth_provider::{AccountProvider, HeaderProvider, WithdrawalsProvider}; use std::{ collections::{hash_map::Entry, HashMap}, time::SystemTime, }; -use reth_primitives::constants; - #[cfg(feature = "optimism")] use reth_primitives::TxDeposit; @@ -19,10 +17,10 @@ use reth_primitives::TxDeposit; pub fn validate_header_standalone( header: &SealedHeader, chain_spec: &ChainSpec, -) -> Result<(), Error> { +) -> Result<(), ConsensusError> { // Gas used needs to be less then gas limit. Gas used is going to be check after execution. 
if header.gas_used > header.gas_limit { - return Err(Error::HeaderGasUsedExceedsGasLimit { + return Err(ConsensusError::HeaderGasUsedExceedsGasLimit { gas_used: header.gas_used, gas_limit: header.gas_limit, }) @@ -32,31 +30,34 @@ pub fn validate_header_standalone( let present_timestamp = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); if header.timestamp > present_timestamp { - return Err(Error::TimestampIsInFuture { timestamp: header.timestamp, present_timestamp }) + return Err(ConsensusError::TimestampIsInFuture { + timestamp: header.timestamp, + present_timestamp, + }) } // From yellow paper: extraData: An arbitrary byte array containing data // relevant to this block. This must be 32 bytes or fewer; formally Hx. if header.extra_data.len() > 32 { - return Err(Error::ExtraDataExceedsMax { len: header.extra_data.len() }) + return Err(ConsensusError::ExtraDataExceedsMax { len: header.extra_data.len() }) } // Check if base fee is set. if chain_spec.fork(Hardfork::London).active_at_block(header.number) && header.base_fee_per_gas.is_none() { - return Err(Error::BaseFeeMissing) + return Err(ConsensusError::BaseFeeMissing) } // EIP-4895: Beacon chain push withdrawals as operations if chain_spec.fork(Hardfork::Shanghai).active_at_timestamp(header.timestamp) && header.withdrawals_root.is_none() { - return Err(Error::WithdrawalsRootMissing) + return Err(ConsensusError::WithdrawalsRootMissing) } else if !chain_spec.fork(Hardfork::Shanghai).active_at_timestamp(header.timestamp) && header.withdrawals_root.is_some() { - return Err(Error::WithdrawalsRootUnexpected) + return Err(ConsensusError::WithdrawalsRootUnexpected) } Ok(()) @@ -70,26 +71,21 @@ pub fn validate_transaction_regarding_header( chain_spec: &ChainSpec, at_block_number: BlockNumber, base_fee: Option, -) -> Result<(), Error> { +) -> Result<(), ConsensusError> { let chain_id = match transaction { - #[cfg(feature = "optimism")] - Transaction::Deposit(TxDeposit { .. 
}) => { - // TODO: get the chain id - None - } Transaction::Legacy(TxLegacy { chain_id, .. }) => { // EIP-155: Simple replay attack protection: https://eips.ethereum.org/EIPS/eip-155 if chain_spec.fork(Hardfork::SpuriousDragon).active_at_block(at_block_number) && chain_id.is_some() { - return Err(Error::TransactionOldLegacyChainId) + return Err(InvalidTransactionError::OldLegacyChainId.into()) } *chain_id } Transaction::Eip2930(TxEip2930 { chain_id, .. }) => { // EIP-2930: Optional access lists: https://eips.ethereum.org/EIPS/eip-2930 (New transaction type) if !chain_spec.fork(Hardfork::Berlin).active_at_block(at_block_number) { - return Err(Error::TransactionEip2930Disabled) + return Err(InvalidTransactionError::Eip2930Disabled.into()) } Some(*chain_id) } @@ -101,28 +97,33 @@ pub fn validate_transaction_regarding_header( }) => { // EIP-1559: Fee market change for ETH 1.0 chain https://eips.ethereum.org/EIPS/eip-1559 if !chain_spec.fork(Hardfork::Berlin).active_at_block(at_block_number) { - return Err(Error::TransactionEip1559Disabled) + return Err(InvalidTransactionError::Eip1559Disabled.into()) } // EIP-1559: add more constraints to the tx validation // https://github.com/ethereum/EIPs/pull/3594 if max_priority_fee_per_gas > max_fee_per_gas { - return Err(Error::TransactionPriorityFeeMoreThenMaxFee) + return Err(InvalidTransactionError::PriorityFeeMoreThenMaxFee.into()) } Some(*chain_id) } + #[cfg(feature = "optimism")] + Transaction::Deposit(TxDeposit { .. }) => { + // TODO: get the chain id + None + } }; if let Some(chain_id) = chain_id { if chain_id != chain_spec.chain().id() { - return Err(Error::TransactionChainId) + return Err(InvalidTransactionError::ChainIdMismatch.into()) } } // Check basefee and few checks that are related to that. 
// https://github.com/ethereum/EIPs/pull/3594 if let Some(base_fee_per_gas) = base_fee { if transaction.max_fee_per_gas() < base_fee_per_gas as u128 { - return Err(Error::TransactionMaxFeeLessThenBaseFee) + return Err(InvalidTransactionError::MaxFeeLessThenBaseFee.into()) } } @@ -163,7 +164,10 @@ pub fn validate_all_transaction_regarding_block_and_nonces< // Signer account shouldn't have bytecode. Presence of bytecode means this is a // smartcontract. if account.has_bytecode() { - return Err(Error::SignerAccountHasBytecode.into()) + return Err(ConsensusError::from( + InvalidTransactionError::SignerAccountHasBytecode, + ) + .into()) } let nonce = account.nonce; entry.insert(account.nonce + 1); @@ -173,7 +177,7 @@ pub fn validate_all_transaction_regarding_block_and_nonces< // check nonce if transaction.nonce() != nonce { - return Err(Error::TransactionNonceNotConsistent.into()) + return Err(ConsensusError::from(InvalidTransactionError::NonceNotConsistent).into()) } } @@ -186,13 +190,16 @@ pub fn validate_all_transaction_regarding_block_and_nonces< /// - Compares the transactions root in the block header to the block body /// - Pre-execution transaction validation /// - (Optionally) Compares the receipts root in the block header to the block body -pub fn validate_block_standalone(block: &SealedBlock, chain_spec: &ChainSpec) -> Result<(), Error> { +pub fn validate_block_standalone( + block: &SealedBlock, + chain_spec: &ChainSpec, +) -> Result<(), ConsensusError> { // Check ommers hash // TODO(onbjerg): This should probably be accessible directly on [Block] let ommers_hash = reth_primitives::proofs::calculate_ommers_root(block.ommers.iter().map(|h| h.as_ref())); if block.header.ommers_hash != ommers_hash { - return Err(Error::BodyOmmersHashDiff { + return Err(ConsensusError::BodyOmmersHashDiff { got: ommers_hash, expected: block.header.ommers_hash, }) @@ -202,7 +209,7 @@ pub fn validate_block_standalone(block: &SealedBlock, chain_spec: &ChainSpec) -> // TODO(onbjerg): 
This should probably be accessible directly on [Block] let transaction_root = reth_primitives::proofs::calculate_transaction_root(block.body.iter()); if block.header.transactions_root != transaction_root { - return Err(Error::BodyTransactionRootDiff { + return Err(ConsensusError::BodyTransactionRootDiff { got: transaction_root, expected: block.header.transactions_root, }) @@ -210,13 +217,14 @@ pub fn validate_block_standalone(block: &SealedBlock, chain_spec: &ChainSpec) -> // EIP-4895: Beacon chain push withdrawals as operations if chain_spec.fork(Hardfork::Shanghai).active_at_timestamp(block.timestamp) { - let withdrawals = block.withdrawals.as_ref().ok_or(Error::BodyWithdrawalsMissing)?; + let withdrawals = + block.withdrawals.as_ref().ok_or(ConsensusError::BodyWithdrawalsMissing)?; let withdrawals_root = reth_primitives::proofs::calculate_withdrawals_root(withdrawals.iter()); let header_withdrawals_root = - block.withdrawals_root.as_ref().ok_or(Error::WithdrawalsRootMissing)?; + block.withdrawals_root.as_ref().ok_or(ConsensusError::WithdrawalsRootMissing)?; if withdrawals_root != *header_withdrawals_root { - return Err(Error::BodyWithdrawalsRootDiff { + return Err(ConsensusError::BodyWithdrawalsRootDiff { got: withdrawals_root, expected: *header_withdrawals_root, }) @@ -228,7 +236,10 @@ pub fn validate_block_standalone(block: &SealedBlock, chain_spec: &ChainSpec) -> for withdrawal in withdrawals.iter().skip(1) { let expected = prev_index + 1; if expected != withdrawal.index { - return Err(Error::WithdrawalIndexInvalid { got: withdrawal.index, expected }) + return Err(ConsensusError::WithdrawalIndexInvalid { + got: withdrawal.index, + expected, + }) } prev_index = withdrawal.index; } @@ -269,10 +280,10 @@ pub fn validate_header_regarding_parent( parent: &SealedHeader, child: &SealedHeader, chain_spec: &ChainSpec, -) -> Result<(), Error> { +) -> Result<(), ConsensusError> { // Parent number is consistent. 
if parent.number + 1 != child.number { - return Err(Error::ParentBlockNumberMismatch { + return Err(ConsensusError::ParentBlockNumberMismatch { parent_block_number: parent.number, block_number: child.number, }) @@ -280,7 +291,7 @@ pub fn validate_header_regarding_parent( // timestamp in past check if child.timestamp < parent.timestamp { - return Err(Error::TimestampIsInPast { + return Err(ConsensusError::TimestampIsInPast { parent_timestamp: parent.timestamp, timestamp: child.timestamp, }) @@ -303,13 +314,13 @@ pub fn validate_header_regarding_parent( // Check gas limit, max diff between child/parent gas_limit should be max_diff=parent_gas/1024 if child.gas_limit > parent_gas_limit { if child.gas_limit - parent_gas_limit >= parent_gas_limit / 1024 { - return Err(Error::GasLimitInvalidIncrease { + return Err(ConsensusError::GasLimitInvalidIncrease { parent_gas_limit, child_gas_limit: child.gas_limit, }) } } else if parent_gas_limit - child.gas_limit >= parent_gas_limit / 1024 { - return Err(Error::GasLimitInvalidDecrease { + return Err(ConsensusError::GasLimitInvalidDecrease { parent_gas_limit, child_gas_limit: child.gas_limit, }) @@ -317,7 +328,7 @@ pub fn validate_header_regarding_parent( // EIP-1559 check base fee if chain_spec.fork(Hardfork::London).active_at_block(child.number) { - let base_fee = child.base_fee_per_gas.ok_or(Error::BaseFeeMissing)?; + let base_fee = child.base_fee_per_gas.ok_or(ConsensusError::BaseFeeMissing)?; let expected_base_fee = if chain_spec.fork(Hardfork::London).transitions_at_block(child.number) { @@ -327,11 +338,11 @@ pub fn validate_header_regarding_parent( calculate_next_block_base_fee( parent.gas_used, parent.gas_limit, - parent.base_fee_per_gas.ok_or(Error::BaseFeeMissing)?, + parent.base_fee_per_gas.ok_or(ConsensusError::BaseFeeMissing)?, ) }; if expected_base_fee != base_fee { - return Err(Error::BaseFeeDiff { expected: expected_base_fee, got: base_fee }) + return Err(ConsensusError::BaseFeeDiff { expected: expected_base_fee, 
got: base_fee }) } } @@ -343,9 +354,10 @@ pub fn validate_header_regarding_parent( /// Checks: /// If we already know the block. /// If parent is known +/// If withdarwals are valid /// /// Returns parent block header -pub fn validate_block_regarding_chain( +pub fn validate_block_regarding_chain( block: &SealedBlock, provider: &PROV, ) -> RethResult { @@ -353,20 +365,47 @@ pub fn validate_block_regarding_chain( // Check if block is known. if provider.is_known(&hash)? { - return Err(Error::BlockKnown { hash, number: block.header.number }.into()) + return Err(ConsensusError::BlockKnown { hash, number: block.header.number }.into()) } // Check if parent is known. let parent = provider .header(&block.parent_hash)? - .ok_or(Error::ParentUnknown { hash: block.parent_hash })?; + .ok_or(ConsensusError::ParentUnknown { hash: block.parent_hash })?; + + // Check if withdrawals are valid. + if let Some(withdrawals) = &block.withdrawals { + if !withdrawals.is_empty() { + let latest_withdrawal = provider.latest_withdrawal()?; + match latest_withdrawal { + Some(withdrawal) => { + if withdrawal.index + 1 != withdrawals.first().unwrap().index { + return Err(ConsensusError::WithdrawalIndexInvalid { + got: withdrawals.first().unwrap().index, + expected: withdrawal.index + 1, + } + .into()) + } + } + None => { + if withdrawals.first().unwrap().index != 0 { + return Err(ConsensusError::WithdrawalIndexInvalid { + got: withdrawals.first().unwrap().index, + expected: 0, + } + .into()) + } + } + } + } + } // Return parent header. Ok(parent.seal(block.parent_hash)) } /// Full validation of block before execution. 
-pub fn full_validation( +pub fn full_validation( block: &SealedBlock, provider: Provider, chain_spec: &ChainSpec, @@ -380,7 +419,7 @@ pub fn full_validation( let transactions = block .body .iter() - .map(|tx| tx.try_ecrecovered().ok_or(Error::TransactionSignerRecoveryError)) + .map(|tx| tx.try_ecrecovered().ok_or(ConsensusError::TransactionSignerRecoveryError)) .collect::, _>>()?; validate_all_transaction_regarding_block_and_nonces( @@ -396,10 +435,11 @@ pub fn full_validation( mod tests { use super::*; use assert_matches::assert_matches; - use reth_interfaces::Result; + use mockall::mock; + use reth_interfaces::{Error::Consensus, Result}; use reth_primitives::{ - hex_literal::hex, proofs, Account, Address, BlockHash, Bytes, ChainSpecBuilder, Header, - Signature, TransactionKind, TransactionSigned, Withdrawal, MAINNET, U256, + hex_literal::hex, proofs, Account, Address, BlockHash, BlockId, Bytes, ChainSpecBuilder, + Header, Signature, TransactionKind, TransactionSigned, Withdrawal, MAINNET, U256, }; use std::ops::RangeBounds; @@ -430,20 +470,45 @@ mod tests { } } + mock! { + WithdrawalsProvider {} + + impl WithdrawalsProvider for WithdrawalsProvider { + fn latest_withdrawal(&self) -> Result> ; + + fn withdrawals_by_block( + &self, + _id: BlockId, + _timestamp: u64, + ) -> RethResult>> ; + } + } + struct Provider { is_known: bool, parent: Option
, account: Option, + withdrawals_provider: MockWithdrawalsProvider, } impl Provider { /// New provider with parent fn new(parent: Option
) -> Self { - Self { is_known: false, parent, account: None } + Self { + is_known: false, + parent, + account: None, + withdrawals_provider: MockWithdrawalsProvider::new(), + } } /// New provider where is_known is always true fn new_known() -> Self { - Self { is_known: true, parent: None, account: None } + Self { + is_known: true, + parent: None, + account: None, + withdrawals_provider: MockWithdrawalsProvider::new(), + } } } @@ -479,6 +544,20 @@ mod tests { } } + impl WithdrawalsProvider for Provider { + fn latest_withdrawal(&self) -> Result> { + self.withdrawals_provider.latest_withdrawal() + } + + fn withdrawals_by_block( + &self, + _id: BlockId, + _timestamp: u64, + ) -> RethResult>> { + self.withdrawals_provider.withdrawals_by_block(_id, _timestamp) + } + } + fn mock_tx(nonce: u64) -> TransactionSignedEcRecovered { let request = Transaction::Eip2930(TxEip2930 { chain_id: 1u64, @@ -520,7 +599,7 @@ mod tests { mix_hash: hex!("0000000000000000000000000000000000000000000000000000000000000000").into(), nonce: 0x0000000000000000, base_fee_per_gas: 0x28f0001df.into(), - withdrawals_root: None + withdrawals_root: None, }; // size: 0x9b5 @@ -551,7 +630,7 @@ mod tests { assert_eq!( full_validation(&block, provider, &MAINNET), - Err(Error::BlockKnown { hash: block.hash(), number: block.number }.into()), + Err(ConsensusError::BlockKnown { hash: block.hash(), number: block.number }.into()), "Should fail with error" ); } @@ -587,7 +666,7 @@ mod tests { provider, &MAINNET, ), - Err(Error::TransactionNonceNotConsistent.into()) + Err(ConsensusError::from(InvalidTransactionError::NonceNotConsistent).into()) ) } @@ -606,7 +685,7 @@ mod tests { provider, &MAINNET, ), - Err(Error::TransactionNonceNotConsistent.into()) + Err(ConsensusError::from(InvalidTransactionError::NonceNotConsistent).into()) ); } @@ -644,13 +723,46 @@ mod tests { let block = create_block_with_withdrawals(&[100, 102]); assert_matches!( validate_block_standalone(&block, &chain_spec), - 
Err(Error::WithdrawalIndexInvalid { .. }) + Err(ConsensusError::WithdrawalIndexInvalid { .. }) ); let block = create_block_with_withdrawals(&[5, 6, 7, 9]); assert_matches!( validate_block_standalone(&block, &chain_spec), - Err(Error::WithdrawalIndexInvalid { .. }) + Err(ConsensusError::WithdrawalIndexInvalid { .. }) ); + + let (_, parent) = mock_block(); + let mut provider = Provider::new(Some(parent.clone())); + // Withdrawal index should be 0 if there are no withdrawals in the chain + let block = create_block_with_withdrawals(&[1, 2, 3]); + provider.withdrawals_provider.expect_latest_withdrawal().return_const(Ok(None)); + assert_matches!( + validate_block_regarding_chain(&block, &provider), + Err(Consensus(ConsensusError::WithdrawalIndexInvalid { got: 1, expected: 0 })) + ); + let block = create_block_with_withdrawals(&[0, 1, 2]); + let res = validate_block_regarding_chain(&block, &provider); + assert!(res.is_ok()); + + // Withdrawal index should be the last withdrawal index + 1 + let mut provider = Provider::new(Some(parent)); + let block = create_block_with_withdrawals(&[4, 5, 6]); + provider + .withdrawals_provider + .expect_latest_withdrawal() + .return_const(Ok(Some(Withdrawal { index: 2, ..Default::default() }))); + assert_matches!( + validate_block_regarding_chain(&block, &provider), + Err(Consensus(ConsensusError::WithdrawalIndexInvalid { got: 4, expected: 3 })) + ); + + let block = create_block_with_withdrawals(&[3, 4, 5]); + provider + .withdrawals_provider + .expect_latest_withdrawal() + .return_const(Ok(Some(Withdrawal { index: 2, ..Default::default() }))); + let res = validate_block_regarding_chain(&block, &provider); + assert!(res.is_ok()); } #[test] diff --git a/crates/consensus/src/beacon/builder.rs b/crates/consensus/src/beacon/builder.rs deleted file mode 100644 index d3f2bd97b45..00000000000 --- a/crates/consensus/src/beacon/builder.rs +++ /dev/null @@ -1,22 +0,0 @@ -use super::BeaconConsensus; -use reth_interfaces::consensus::ForkchoiceState; 
-use reth_primitives::ChainSpec; -use std::sync::Arc; -use tokio::sync::watch; - -/// TODO: -#[derive(Debug, Default)] -pub struct BeaconConsensusBuilder; - -impl BeaconConsensusBuilder { - /// Create new instance of [BeaconConsensus] and forkchoice notifier. Internally, creates a - /// [watch::channel] for updating the forkchoice state. - pub fn build( - self, - chain_spec: ChainSpec, - ) -> (Arc, watch::Sender) { - let (forkchoice_state_tx, forkchoice_state_rx) = watch::channel(ForkchoiceState::default()); - let inner = Arc::new(BeaconConsensus::new(chain_spec, forkchoice_state_rx)); - (inner, forkchoice_state_tx) - } -} diff --git a/crates/consensus/src/beacon/mod.rs b/crates/consensus/src/beacon/mod.rs deleted file mode 100644 index ec2d4ab5c15..00000000000 --- a/crates/consensus/src/beacon/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! Beacon consensus implementation. - -mod beacon_consensus; -mod builder; - -pub use beacon_consensus::BeaconConsensus; -pub use builder::BeaconConsensusBuilder; diff --git a/crates/executor/Cargo.toml b/crates/executor/Cargo.toml index 18ff91ecfca..c57347abf5a 100644 --- a/crates/executor/Cargo.toml +++ b/crates/executor/Cargo.toml @@ -6,18 +6,24 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/paradigmxyz/reth" readme = "README.md" +[package.metadata.cargo-udeps.ignore] +normal = [ + # Used for diagrams in docs + "aquamarine", +] + [dependencies] # reth reth-primitives = { path = "../primitives" } reth-interfaces = { path = "../interfaces" } reth-revm = { path = "../revm" } +reth-revm-inspectors = { path = "../revm/revm-inspectors" } reth-rlp = { path = "../rlp" } reth-db = { path = "../storage/db" } reth-provider = { path = "../storage/provider" } -revm = { version = "3.0.0"} -# remove from reth and reexport from revm -hashbrown = "0.13" +# revm +revm = { version = "3.0.0" } # common async-trait = "0.1.57" @@ -26,6 +32,10 @@ auto_impl = "1.0" tracing = "0.1.37" tokio = { version = "1.21.2", features = ["sync"] } +# 
mics +aquamarine = "0.3.0" +parking_lot = { version = "0.12", optional = true } + triehash = "0.8" # See to replace hashers to simplify libraries plain_hasher = "0.2" @@ -38,3 +48,10 @@ sha3 = { version = "0.10", default-features = false } [dev-dependencies] reth-db = { path = "../storage/db", features = ["test-utils"] } +reth-interfaces = { path = "../interfaces", features = ["test-utils"] } +reth-primitives = { path = "../primitives", features = ["test-utils"] } +reth-provider = { path = "../storage/provider", features = ["test-utils"] } +parking_lot = "0.12" + +[features] +test-utils = ["parking_lot"] diff --git a/crates/executor/src/blockchain_tree/block_indices.rs b/crates/executor/src/blockchain_tree/block_indices.rs new file mode 100644 index 00000000000..ba5c63fa9f2 --- /dev/null +++ b/crates/executor/src/blockchain_tree/block_indices.rs @@ -0,0 +1,308 @@ +//! Implementation of [`BlockIndices`] related to [`super::BlockchainTree`] + +use super::chain::{BlockChainId, Chain, ForkBlock}; +use reth_primitives::{BlockHash, BlockNumber, SealedBlockWithSenders}; +use std::collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet}; + +/// Internal indices of the blocks and chains. +/// +/// This is main connection between blocks, chains and canonical chain. +/// +/// It contains a list of canonical block hashes, forks to child blocks, and a mapping of block hash +/// to chain ID. +#[derive(Debug)] +pub struct BlockIndices { + /// Last finalized block. + last_finalized_block: BlockNumber, + /// Canonical chain. Contains N number (depends on `finalization_depth`) of blocks. + /// These blocks are found in fork_to_child but not inside `blocks_to_chain` or + /// `number_to_block` as those are chain specific indices. + canonical_chain: BTreeMap, + /// Index needed when discarding the chain, so we can remove connected chains from tree. + /// NOTE: It contains just a blocks that are forks as a key and not all blocks. 
+ fork_to_child: HashMap>, + /// Block hashes and side chain they belong + blocks_to_chain: HashMap, + /// Utility index. Block number to block hash. Can be used for + /// RPC to fetch all pending block in chain by its number. + index_number_to_block: HashMap>, +} + +impl BlockIndices { + /// Create new block indices structure + pub fn new( + last_finalized_block: BlockNumber, + canonical_chain: BTreeMap, + ) -> Self { + Self { + last_finalized_block, + canonical_chain, + fork_to_child: Default::default(), + blocks_to_chain: Default::default(), + index_number_to_block: Default::default(), + } + } + + /// Return fork to child indices + pub fn fork_to_child(&self) -> &HashMap> { + &self.fork_to_child + } + + /// Return block to chain id + pub fn blocks_to_chain(&self) -> &HashMap { + &self.blocks_to_chain + } + + /// Check if block hash belongs to canonical chain. + pub fn is_block_hash_canonical(&self, block_hash: &BlockHash) -> bool { + self.canonical_chain.range(self.last_finalized_block..).any(|(_, &h)| h == *block_hash) + } + + /// Last finalized block + pub fn last_finalized_block(&self) -> BlockNumber { + self.last_finalized_block + } + + /// Insert non fork block. 
+ pub fn insert_non_fork_block( + &mut self, + block_number: BlockNumber, + block_hash: BlockHash, + chain_id: BlockChainId, + ) { + self.index_number_to_block.entry(block_number).or_default().insert(block_hash); + self.blocks_to_chain.insert(block_hash, chain_id); + } + + /// Insert block to chain and fork child indices of the new chain + pub fn insert_chain(&mut self, chain_id: BlockChainId, chain: &Chain) { + for (number, block) in chain.blocks().iter() { + // add block -> chain_id index + self.blocks_to_chain.insert(block.hash(), chain_id); + // add number -> block + self.index_number_to_block.entry(*number).or_default().insert(block.hash()); + } + let first = chain.first(); + // add parent block -> block index + self.fork_to_child.entry(first.parent_hash).or_default().insert(first.hash()); + } + + /// Get the chain ID the block belongs to + pub fn get_blocks_chain_id(&self, block: &BlockHash) -> Option { + self.blocks_to_chain.get(block).cloned() + } + + /// Update all block hashes. iterate over present and new list of canonical hashes and compare + /// them. Remove all missmatches, disconnect them and return all chains that needs to be + /// removed. + pub fn update_block_hashes( + &mut self, + hashes: BTreeMap, + ) -> BTreeSet { + let mut new_hashes = hashes.iter(); + let mut old_hashes = self.canonical_chain().clone().into_iter(); + + let mut remove = Vec::new(); + + let mut new_hash = new_hashes.next(); + let mut old_hash = old_hashes.next(); + + loop { + let Some(old_block_value) = old_hash else { + // end of old_hashes canonical chain. New chain has more block then old chain. + break + }; + let Some(new_block_value) = new_hash else { + // Old canonical chain had more block than new chain. + // remove all present block. + // this is mostly not going to happen as reorg should make new chain in Tree. 
+ while let Some(rem) = old_hash { + remove.push(rem); + old_hash = old_hashes.next(); + } + break; + }; + // compare old and new canonical block number + match new_block_value.0.cmp(&old_block_value.0) { + std::cmp::Ordering::Less => { + // new chain has more past blocks than old chain + new_hash = new_hashes.next(); + } + std::cmp::Ordering::Equal => { + if *new_block_value.1 != old_block_value.1 { + // remove block hash as it is different + remove.push(old_block_value); + } + new_hash = new_hashes.next(); + old_hash = old_hashes.next(); + } + std::cmp::Ordering::Greater => { + // old chain has more past blocks that new chain + remove.push(old_block_value); + old_hash = old_hashes.next() + } + } + } + self.canonical_chain = hashes; + + remove.into_iter().fold(BTreeSet::new(), |mut fold, (number, hash)| { + fold.extend(self.remove_block(number, hash)); + fold + }) + } + + /// Remove chain from indices and return dependent chains that needs to be removed. + /// Does the cleaning of the tree and removing blocks from the chain. + pub fn remove_chain(&mut self, chain: &Chain) -> BTreeSet { + let mut lose_chains = BTreeSet::new(); + for (block_number, block) in chain.blocks().iter() { + let block_hash = block.hash(); + lose_chains.extend(self.remove_block(*block_number, block_hash)) + } + lose_chains + } + + /// Remove Blocks from indices. 
+ fn remove_block( + &mut self, + block_number: BlockNumber, + block_hash: BlockHash, + ) -> BTreeSet { + // rm number -> block + if let Entry::Occupied(mut entry) = self.index_number_to_block.entry(block_number) { + let set = entry.get_mut(); + set.remove(&block_hash); + // remove set if empty + if set.is_empty() { + entry.remove(); + } + } + + // rm block -> chain_id + self.blocks_to_chain.remove(&block_hash); + + // rm fork -> child + let removed_fork = self.fork_to_child.remove(&block_hash); + removed_fork + .map(|fork_blocks| { + fork_blocks + .into_iter() + .filter_map(|fork_child| self.blocks_to_chain.remove(&fork_child)) + .collect() + }) + .unwrap_or_default() + } + + /// Remove all blocks from canonical list and insert new blocks to it. + /// + /// It is assumed that blocks are interconnected and that they connect to canonical chain + pub fn canonicalize_blocks(&mut self, blocks: &BTreeMap) { + if blocks.is_empty() { + return + } + + // Remove all blocks from canonical chain + let first_number = *blocks.first_key_value().unwrap().0; + + // this will remove all blocks numbers that are going to be replaced. 
+ self.canonical_chain.retain(|num, _| *num < first_number); + + // remove them from block to chain_id index + blocks.iter().map(|(_, b)| (b.number, b.hash(), b.parent_hash)).for_each( + |(number, hash, parent_hash)| { + // rm block -> chain_id + self.blocks_to_chain.remove(&hash); + + // rm number -> block + if let Entry::Occupied(mut entry) = self.index_number_to_block.entry(number) { + let set = entry.get_mut(); + set.remove(&hash); + // remove set if empty + if set.is_empty() { + entry.remove(); + } + } + // rm fork block -> hash + if let Entry::Occupied(mut entry) = self.fork_to_child.entry(parent_hash) { + let set = entry.get_mut(); + set.remove(&hash); + // remove set if empty + if set.is_empty() { + entry.remove(); + } + } + }, + ); + + // insert new canonical + self.canonical_chain.extend(blocks.iter().map(|(number, block)| (*number, block.hash()))) + } + + /// this is function that is going to remove N number of last canonical hashes. + /// + /// NOTE: This is not safe standalone, as it will not disconnect + /// blocks that deppends on unwinded canonical chain. And should be + /// used when canonical chain is reinserted inside Tree. + pub(crate) fn unwind_canonical_chain(&mut self, unwind_to: BlockNumber) { + // this will remove all blocks numbers that are going to be replaced. + self.canonical_chain.retain(|num, _| *num <= unwind_to); + } + + /// Used for finalization of block. + /// Return list of chains for removal that depend on finalized canonical chain. + pub fn finalize_canonical_blocks( + &mut self, + finalized_block: BlockNumber, + num_of_additional_canonical_hashes_to_retain: u64, + ) -> BTreeSet { + // get finalized chains. blocks between [self.last_finalized,finalized_block). + // Dont remove finalized_block, as sidechain can point to it. 
+ let finalized_blocks: Vec = self + .canonical_chain + .iter() + .filter(|(&number, _)| number >= self.last_finalized_block && number < finalized_block) + .map(|(_, hash)| *hash) + .collect(); + + // remove unneeded canonical hashes. + let remove_until = + finalized_block.saturating_sub(num_of_additional_canonical_hashes_to_retain); + self.canonical_chain.retain(|&number, _| number >= remove_until); + + let mut lose_chains = BTreeSet::new(); + + for block_hash in finalized_blocks.into_iter() { + // there is a fork block. + if let Some(fork_blocks) = self.fork_to_child.remove(&block_hash) { + lose_chains = fork_blocks.into_iter().fold(lose_chains, |mut fold, fork_child| { + if let Some(lose_chain) = self.blocks_to_chain.remove(&fork_child) { + fold.insert(lose_chain); + } + fold + }); + } + } + + // set last finalized block. + self.last_finalized_block = finalized_block; + + lose_chains + } + + /// get canonical hash + pub fn canonical_hash(&self, block_number: &BlockNumber) -> Option { + self.canonical_chain.get(block_number).cloned() + } + + /// get canonical tip + pub fn canonical_tip(&self) -> ForkBlock { + let (&number, &hash) = + self.canonical_chain.last_key_value().expect("There is always the canonical chain"); + ForkBlock { number, hash } + } + + /// Canonical chain needs for execution of EVM. It should contains last 256 block hashes. + pub fn canonical_chain(&self) -> &BTreeMap { + &self.canonical_chain + } +} diff --git a/crates/executor/src/blockchain_tree/chain.rs b/crates/executor/src/blockchain_tree/chain.rs new file mode 100644 index 00000000000..4391e6c8b2e --- /dev/null +++ b/crates/executor/src/blockchain_tree/chain.rs @@ -0,0 +1,458 @@ +//! A chain in a [`BlockchainTree`][super::BlockchainTree]. +//! +//! A [`Chain`] contains the state of accounts for the chain after execution of its constituent +//! blocks, as well as a list of the blocks the chain is composed of. 
+use crate::{post_state::PostState, substate::PostStateProvider}; +use reth_interfaces::{consensus::Consensus, executor::Error as ExecError, Error}; +use reth_primitives::{BlockHash, BlockNumber, SealedBlockWithSenders, SealedHeader, U256}; +use reth_provider::{BlockExecutor, ExecutorFactory, StateProvider}; +use std::collections::BTreeMap; + +/// The ID of a sidechain internally in a [`BlockchainTree`][super::BlockchainTree]. +pub(crate) type BlockChainId = u64; + +/// A side chain. +/// +/// The sidechain contains the state of accounts after execution of its blocks, +/// changesets for those blocks (and their transactions), as well as the blocks themselves. +/// +/// Each chain in the tree are identified using a unique ID. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct Chain { + /// The state of accounts after execution of the blocks in this chain. + /// + /// This state also contains the individual changes that lead to the current state. + state: PostState, + /// The blocks in this chain. + blocks: BTreeMap, + /// A mapping of each block number in the chain to the highest transition ID in the chain's + /// state after execution of the block. + /// + /// This is used to revert changes in the state until a certain block number when the chain is + /// split. + block_transitions: BTreeMap, +} + +/// Describes a fork block by its number and hash. +#[derive(Clone, Copy, Eq, PartialEq)] +pub struct ForkBlock { + /// Block number of block that chains branches from + pub number: u64, + /// Block hash of block that chains branches from + pub hash: BlockHash, +} + +impl ForkBlock { + /// Return the `(block_number, block_hash)` tuple for this fork block. + pub fn num_hash(&self) -> (BlockNumber, BlockHash) { + (self.number, self.hash) + } +} + +impl Chain { + /// Get the blocks in this chain. + pub fn blocks(&self) -> &BTreeMap { + &self.blocks + } + + /// Destructure the chain into its inner components, the blocks and the state. 
+ pub fn into_inner(self) -> (BTreeMap, PostState) { + (self.blocks, self.state) + } + + /// Get the block at which this chain forked. + pub fn fork_block(&self) -> ForkBlock { + let tip = self.first(); + ForkBlock { number: tip.number.saturating_sub(1), hash: tip.parent_hash } + } + + /// Get the block number at which this chain forked. + pub fn fork_block_number(&self) -> BlockNumber { + self.first().number.saturating_sub(1) + } + + /// Get the block hash at which this chain forked. + pub fn fork_block_hash(&self) -> BlockHash { + self.first().parent_hash + } + + /// Get the first block in this chain. + pub fn first(&self) -> &SealedBlockWithSenders { + self.blocks.first_key_value().expect("Chain has at least one block for first").1 + } + + /// Get the tip of the chain. + /// + /// # Note + /// + /// Chains always have at least one block. + pub fn tip(&self) -> &SealedBlockWithSenders { + self.blocks.last_key_value().expect("Chain should have at least one block").1 + } + + /// Create new chain with given blocks and post state. + pub fn new(blocks: Vec<(SealedBlockWithSenders, PostState)>) -> Self { + let mut state = PostState::default(); + let mut block_transitions = BTreeMap::new(); + let mut block_num_hash = BTreeMap::new(); + for (block, block_state) in blocks.into_iter() { + state.extend(block_state); + block_transitions.insert(block.number, state.transitions_count()); + block_num_hash.insert(block.number, block); + } + + Self { state, block_transitions, blocks: block_num_hash } + } + + /// Create a new chain that forks off of the canonical chain. 
+ pub fn new_canonical_fork( + block: &SealedBlockWithSenders, + parent_header: &SealedHeader, + canonical_block_hashes: &BTreeMap, + provider: &SP, + consensus: &C, + factory: &EF, + ) -> Result { + let state = PostState::default(); + let empty = BTreeMap::new(); + + let state_provider = + PostStateProvider::new(&state, provider, &empty, canonical_block_hashes); + + let changeset = Self::validate_and_execute( + block.clone(), + parent_header, + state_provider, + consensus, + factory, + )?; + + Ok(Self::new(vec![(block.clone(), changeset)])) + } + + /// Create a new chain that forks off of an existing sidechain. + pub fn new_chain_fork( + &self, + block: SealedBlockWithSenders, + side_chain_block_hashes: BTreeMap, + canonical_block_hashes: &BTreeMap, + provider: &SP, + consensus: &C, + factory: &EF, + ) -> Result { + let parent_number = block.number - 1; + let parent = self + .blocks + .get(&parent_number) + .ok_or(ExecError::BlockNumberNotFoundInChain { block_number: parent_number })?; + + let revert_to_transition_id = self + .block_transitions + .get(&parent.number) + .expect("Should have the transition ID for the parent block"); + let mut state = self.state.clone(); + + // Revert state to the state after execution of the parent block + state.revert_to(*revert_to_transition_id); + + // Revert changesets to get the state of the parent that we need to apply the change. + let state_provider = PostStateProvider::new( + &state, + provider, + &side_chain_block_hashes, + canonical_block_hashes, + ); + let block_state = + Self::validate_and_execute(block.clone(), parent, state_provider, consensus, factory)?; + state.extend(block_state); + + let chain = Self { + block_transitions: BTreeMap::from([(block.number, state.transitions_count())]), + state, + blocks: BTreeMap::from([(block.number, block)]), + }; + + // If all is okay, return new chain back. Present chain is not modified. + Ok(chain) + } + + /// Validate and execute the given block. 
+ fn validate_and_execute( + block: SealedBlockWithSenders, + parent_block: &SealedHeader, + state_provider: PostStateProvider<'_, SP>, + consensus: &C, + factory: &EF, + ) -> Result { + consensus.validate_header(&block, U256::MAX)?; + consensus.pre_validate_header(&block, parent_block)?; + consensus.pre_validate_block(&block)?; + + let (unseal, senders) = block.into_components(); + let unseal = unseal.unseal(); + + factory + .with_sp(state_provider) + .execute_and_verify_receipt(&unseal, U256::MAX, Some(senders)) + .map_err(Into::into) + } + + /// Validate and execute the given block, and append it to this chain. + pub fn append_block( + &mut self, + block: SealedBlockWithSenders, + side_chain_block_hashes: BTreeMap, + canonical_block_hashes: &BTreeMap, + provider: &SP, + consensus: &C, + factory: &EF, + ) -> Result<(), Error> { + let (_, parent_block) = self.blocks.last_key_value().expect("Chain has at least one block"); + + let block_state = Self::validate_and_execute( + block.clone(), + parent_block, + PostStateProvider::new( + &self.state, + provider, + &side_chain_block_hashes, + canonical_block_hashes, + ), + consensus, + factory, + )?; + self.state.extend(block_state); + self.block_transitions.insert(block.number, self.state.transitions_count()); + self.blocks.insert(block.number, block); + Ok(()) + } + + /// Merge two chains by appending the given chain into the current one. + /// + /// The state of accounts for this chain is set to the state of the newest chain. 
+ pub fn append_chain(&mut self, chain: Chain) -> Result<(), Error> { + let chain_tip = self.tip(); + if chain_tip.hash != chain.fork_block_hash() { + return Err(ExecError::AppendChainDoesntConnect { + chain_tip: chain_tip.num_hash(), + other_chain_fork: chain.fork_block().num_hash(), + } + .into()) + } + + // Insert blocks from other chain + self.blocks.extend(chain.blocks.into_iter()); + let current_transition_count = self.state.transitions_count(); + self.state.extend(chain.state); + + // Update the block transition mapping, shifting the transition ID by the current number of + // transitions in *this* chain + for (block_number, transition_id) in chain.block_transitions.iter() { + self.block_transitions.insert(*block_number, transition_id + current_transition_count); + } + Ok(()) + } + + /// Split this chain at the given block. + /// + /// The given block will be the first block in the first returned chain. + /// + /// If the given block is not found, [`ChainSplit::NoSplitPending`] is returned. + /// Split chain at the number or hash, block with given number will be included at first chain. + /// If any chain is empty (Does not have blocks) None will be returned. + /// + /// # Note + /// + /// The block number to transition ID mapping is only found in the second chain, making it + /// impossible to perform any state reverts on the first chain. + /// + /// The second chain only contains the changes that were reverted on the first chain; however, + /// it retains the up to date state as if the chains were one, i.e. the second chain is an + /// extension of the first. 
+ pub fn split(mut self, split_at: SplitAt) -> ChainSplit { + let chain_tip = *self.blocks.last_entry().expect("chain is never empty").key(); + let block_number = match split_at { + SplitAt::Hash(block_hash) => { + let block_number = self + .blocks + .iter() + .find_map(|(num, block)| (block.hash() == block_hash).then_some(*num)); + let Some(block_number) = block_number else { return ChainSplit::NoSplitPending(self)}; + // If block number is same as tip whole chain is becoming canonical. + if block_number == chain_tip { + return ChainSplit::NoSplitCanonical(self) + } + block_number + } + SplitAt::Number(block_number) => { + if block_number >= chain_tip { + return ChainSplit::NoSplitCanonical(self) + } + if block_number < *self.blocks.first_entry().expect("chain is never empty").key() { + return ChainSplit::NoSplitPending(self) + } + block_number + } + }; + + let higher_number_blocks = self.blocks.split_off(&(block_number + 1)); + + let mut canonical_state = std::mem::take(&mut self.state); + let new_state = canonical_state.split_at( + *self.block_transitions.get(&(block_number)).expect("Unknown block transition ID"), + ); + self.state = new_state; + + ChainSplit::Split { + canonical: Chain { + state: canonical_state, + block_transitions: BTreeMap::new(), + blocks: self.blocks, + }, + pending: Chain { + state: self.state, + block_transitions: self.block_transitions, + blocks: higher_number_blocks, + }, + } + } +} + +/// Used in spliting the chain. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum SplitAt { + /// Split at block number. + Number(BlockNumber), + /// Split at block hash. + Hash(BlockHash), +} + +/// Result of spliting chain. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ChainSplit { + /// Chain is not splited. Pending chain is returned. + /// Given block split is higher than last block. + /// Or in case of split by hash when hash is unknown. + NoSplitPending(Chain), + /// Chain is not splited. Canonical chain is returned. 
+ /// Given block split is lower than first block. + NoSplitCanonical(Chain), + /// Chain is splited in two. + /// Given block split is contained in first chain. + Split { + /// Left contains lower block number that get canonicalized. + /// And substate is empty and not usable. + canonical: Chain, + /// Right contains higher block number, that is still pending. + /// And substate from original chain is moved here. + pending: Chain, + }, +} + +#[cfg(test)] +mod tests { + use super::*; + use reth_primitives::{Account, H160, H256}; + + #[test] + fn chain_append() { + let block = SealedBlockWithSenders::default(); + let block1_hash = H256([0x01; 32]); + let block2_hash = H256([0x02; 32]); + let block3_hash = H256([0x03; 32]); + let block4_hash = H256([0x04; 32]); + + let mut block1 = block.clone(); + let mut block2 = block.clone(); + let mut block3 = block.clone(); + let mut block4 = block; + + block1.block.header.hash = block1_hash; + block2.block.header.hash = block2_hash; + block3.block.header.hash = block3_hash; + block4.block.header.hash = block4_hash; + + block3.block.header.header.parent_hash = block2_hash; + + let mut chain1 = + Chain { blocks: BTreeMap::from([(1, block1), (2, block2)]), ..Default::default() }; + + let chain2 = + Chain { blocks: BTreeMap::from([(3, block3), (4, block4)]), ..Default::default() }; + + assert_eq!(chain1.append_chain(chain2.clone()), Ok(())); + + // chain1 got changed so this will fail + assert!(chain1.append_chain(chain2).is_err()); + } + + #[test] + fn test_number_split() { + let mut base_state = PostState::default(); + let mut account = Account::default(); + account.nonce = 10; + base_state.create_account(H160([1; 20]), account); + base_state.finish_transition(); + + let mut block_state1 = PostState::default(); + block_state1.create_account(H160([2; 20]), Account::default()); + block_state1.finish_transition(); + + let mut block_state2 = PostState::default(); + block_state2.create_account(H160([3; 20]), Account::default()); + 
block_state2.finish_transition(); + + let mut block1 = SealedBlockWithSenders::default(); + let block1_hash = H256([15; 32]); + block1.number = 1; + block1.hash = block1_hash; + block1.senders.push(H160([4; 20])); + + let mut block2 = SealedBlockWithSenders::default(); + let block2_hash = H256([16; 32]); + block2.number = 2; + block2.hash = block2_hash; + block2.senders.push(H160([4; 20])); + + let chain = Chain::new(vec![ + (block1.clone(), block_state1.clone()), + (block2.clone(), block_state2.clone()), + ]); + + let mut split1_state = chain.state.clone(); + let split2_state = split1_state.split_at(*chain.block_transitions.get(&1).unwrap()); + + let chain_split1 = Chain { + state: split1_state, + block_transitions: BTreeMap::new(), + blocks: BTreeMap::from([(1, block1.clone())]), + }; + + let chain_split2 = Chain { + state: split2_state, + block_transitions: chain.block_transitions.clone(), + blocks: BTreeMap::from([(2, block2.clone())]), + }; + + // split in two + assert_eq!( + chain.clone().split(SplitAt::Hash(block1_hash)), + ChainSplit::Split { canonical: chain_split1, pending: chain_split2 } + ); + + // split at unknown block hash + assert_eq!( + chain.clone().split(SplitAt::Hash(H256([100; 32]))), + ChainSplit::NoSplitPending(chain.clone()) + ); + + // split at higher number + assert_eq!( + chain.clone().split(SplitAt::Number(10)), + ChainSplit::NoSplitCanonical(chain.clone()) + ); + + // split at lower number + assert_eq!(chain.clone().split(SplitAt::Number(0)), ChainSplit::NoSplitPending(chain)); + } +} diff --git a/crates/executor/src/blockchain_tree/config.rs b/crates/executor/src/blockchain_tree/config.rs new file mode 100644 index 00000000000..d830f066fd0 --- /dev/null +++ b/crates/executor/src/blockchain_tree/config.rs @@ -0,0 +1,60 @@ +//! Blockchain tree configuration + +/// The configuration for the blockchain tree. 
+#[derive(Clone, Debug)] +pub struct BlockchainTreeConfig { + /// Number of blocks after the last finalized block that we are storing. + /// + /// It should be more than the finalization window for the canonical chain. + max_blocks_in_chain: u64, + /// The number of blocks that can be re-orged (finalization windows) + max_reorg_depth: u64, + /// For EVM's "BLOCKHASH" opcode we require last 256 block hashes. So we need to specify + /// at least `additional_canonical_block_hashes`+`max_reorg_depth`, for eth that would be + /// 256+64. + num_of_additional_canonical_block_hashes: u64, +} + +impl Default for BlockchainTreeConfig { + fn default() -> Self { + // The defaults for Ethereum mainnet + Self { + // Gasper allows reorgs of any length from 1 to 64. + max_reorg_depth: 64, + // This default is just an assumption. Has to be greater than the `max_reorg_depth`. + max_blocks_in_chain: 65, + // EVM requires that last 256 block hashes are available. + num_of_additional_canonical_block_hashes: 256, + } + } +} + +impl BlockchainTreeConfig { + /// Create tree configuration. + pub fn new( + max_reorg_depth: u64, + max_blocks_in_chain: u64, + num_of_additional_canonical_block_hashes: u64, + ) -> Self { + if max_reorg_depth > max_blocks_in_chain { + panic!("Side chain size should be more then finalization window"); + } + Self { max_blocks_in_chain, max_reorg_depth, num_of_additional_canonical_block_hashes } + } + + /// Return the maximum reorg depth. + pub fn max_reorg_depth(&self) -> u64 { + self.max_reorg_depth + } + + /// Return the maximum number of blocks in one chain. + pub fn max_blocks_in_chain(&self) -> u64 { + self.max_blocks_in_chain + } + + /// Return number of additional canonical block hashes that we need to retain + /// in order to have enough information for EVM execution. 
+ pub fn num_of_additional_canonical_block_hashes(&self) -> u64 { + self.num_of_additional_canonical_block_hashes + } +} diff --git a/crates/executor/src/blockchain_tree/externals.rs b/crates/executor/src/blockchain_tree/externals.rs new file mode 100644 index 00000000000..874760ff420 --- /dev/null +++ b/crates/executor/src/blockchain_tree/externals.rs @@ -0,0 +1,46 @@ +//! Blockchain tree externals. + +use reth_db::database::Database; +use reth_primitives::ChainSpec; +use reth_provider::ShareableDatabase; +use std::sync::Arc; + +/// A container for external components. +/// +/// This is a simple container for external components used throughout the blockchain tree +/// implementation: +/// +/// - A handle to the database +/// - A handle to the consensus engine +/// - The executor factory to exexcute blocks with +/// - The chain spec +#[derive(Debug)] +pub struct TreeExternals { + /// The database, used to commit the canonical chain, or unwind it. + pub db: Arc, + /// The consensus engine. + pub consensus: C, + /// The executor factory to execute blocks with. + pub executor_factory: EF, + /// The chain spec. + pub chain_spec: Arc, +} + +impl TreeExternals { + /// Create new tree externals. + pub fn new( + db: Arc, + consensus: C, + executor_factory: EF, + chain_spec: Arc, + ) -> Self { + Self { db, consensus, executor_factory, chain_spec } + } +} + +impl TreeExternals { + /// Return shareable database helper structure. + pub fn shareable_db(&self) -> ShareableDatabase<&DB> { + ShareableDatabase::new(&self.db, self.chain_spec.clone()) + } +} diff --git a/crates/executor/src/blockchain_tree/mod.rs b/crates/executor/src/blockchain_tree/mod.rs new file mode 100644 index 00000000000..481c60d77d2 --- /dev/null +++ b/crates/executor/src/blockchain_tree/mod.rs @@ -0,0 +1,974 @@ +//! 
Implementation of [`BlockchainTree`] +use chain::{BlockChainId, Chain, ForkBlock}; +use reth_db::{cursor::DbCursorRO, database::Database, tables, transaction::DbTx}; +use reth_interfaces::{consensus::Consensus, executor::Error as ExecError, Error}; +use reth_primitives::{BlockHash, BlockNumber, SealedBlock, SealedBlockWithSenders}; +use reth_provider::{ + providers::ChainState, ExecutorFactory, HeaderProvider, StateProviderFactory, Transaction, +}; +use std::{ + collections::{BTreeMap, HashMap}, + ops::DerefMut, +}; + +pub mod block_indices; +use block_indices::BlockIndices; + +pub mod chain; +use chain::{ChainSplit, SplitAt}; + +pub mod config; +use config::BlockchainTreeConfig; + +pub mod externals; +use externals::TreeExternals; + +#[cfg_attr(doc, aquamarine::aquamarine)] +/// Tree of chains and its identifications. +/// +/// Mermaid flowchart represent all blocks that can appear in blockchain. +/// Green blocks belong to canonical chain and are saved inside database table, they are our main +/// chain. Pending blocks and sidechains are found in memory inside [`BlockchainTree`]. +/// Both pending and sidechains have same mechanisms only difference is when they got committed to +/// database. For pending it is just append operation but for sidechains they need to move current +/// canonical blocks to BlockchainTree flush sidechain to the database to become canonical chain. 
+/// ```mermaid +/// flowchart BT +/// subgraph canonical chain +/// CanonState:::state +/// block0canon:::canon -->block1canon:::canon -->block2canon:::canon -->block3canon:::canon --> block4canon:::canon --> block5canon:::canon +/// end +/// block5canon --> block6pending1:::pending +/// block5canon --> block6pending2:::pending +/// subgraph sidechain2 +/// S2State:::state +/// block3canon --> block4s2:::sidechain --> block5s2:::sidechain +/// end +/// subgraph sidechain1 +/// S1State:::state +/// block2canon --> block3s1:::sidechain --> block4s1:::sidechain --> block5s1:::sidechain --> block6s1:::sidechain +/// end +/// classDef state fill:#1882C4 +/// classDef canon fill:#8AC926 +/// classDef pending fill:#FFCA3A +/// classDef sidechain fill:#FF595E +/// ``` +/// +/// +/// main functions: +/// * [BlockchainTree::insert_block]: Connect block to chain, execute it and if valid insert block +/// inside tree. +/// * [BlockchainTree::finalize_block]: Remove chains that join to now finalized block, as chain +/// becomes invalid. +/// * [BlockchainTree::make_canonical]: Check if we have the hash of block that we want to finalize +/// and commit it to db. If we dont have the block, pipeline syncing should start to fetch the +/// blocks from p2p. Do reorg in tables if canonical chain if needed. +#[derive(Debug)] +pub struct BlockchainTree { + /// The tracked chains and their current data. + chains: HashMap, + /// Static blockchain ID generator + block_chain_id_generator: u64, + /// Indices to block and their connection to the canonical chain. + block_indices: BlockIndices, + /// External components (the database, consensus engine etc.) + externals: TreeExternals, + /// Tree configuration + config: BlockchainTreeConfig, +} + +/// From Engine API spec, block inclusion can be valid, accepted or invalid. +/// Invalid case is already covered by error but we needs to make distinction +/// between if it is valid (extends canonical chain) or just accepted (is side chain). 
+/// If we dont know the block parent we are returning Disconnected status +/// as we can't make a claim if block is valid or not. +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum BlockStatus { + /// If block validation is valid and block extends canonical chain. + /// In BlockchainTree sense it forks on canonical tip. + Valid, + /// If block validation is valid but block does not extend canonical chain + /// (It is side chain) or hasn't been fully validated but ancestors of a payload are known. + Accepted, + /// If blocks is not connected to canonical chain. + Disconnected, +} + +/// A container that wraps chains and block indices to allow searching for block hashes across all +/// sidechains. +pub struct BlockHashes<'a> { + /// The current tracked chains. + pub chains: &'a mut HashMap, + /// The block indices for all chains. + pub indices: &'a BlockIndices, +} + +impl BlockchainTree { + /// Create a new blockchain tree. + pub fn new( + externals: TreeExternals, + config: BlockchainTreeConfig, + ) -> Result { + let max_reorg_depth = config.max_reorg_depth(); + + let last_canonical_hashes = externals + .db + .tx()? + .cursor_read::()? + .walk_back(None)? 
+ .take((max_reorg_depth + config.num_of_additional_canonical_block_hashes()) as usize) + .collect::, _>>()?; + + // TODO(rakita) save last finalized block inside database but for now just take + // tip-max_reorg_depth + // task: https://github.com/paradigmxyz/reth/issues/1712 + let (last_finalized_block_number, _) = + if last_canonical_hashes.len() > max_reorg_depth as usize { + last_canonical_hashes[max_reorg_depth as usize] + } else { + // it is in reverse order from tip to N + last_canonical_hashes.last().cloned().unwrap_or_default() + }; + + Ok(Self { + externals, + block_chain_id_generator: 0, + chains: Default::default(), + block_indices: BlockIndices::new( + last_finalized_block_number, + BTreeMap::from_iter(last_canonical_hashes.into_iter()), + ), + config, + }) + } + + /// Create a new sidechain by forking the given chain, or append the block if the parent block + /// is the top of the given chain. + fn fork_side_chain( + &mut self, + block: SealedBlockWithSenders, + chain_id: BlockChainId, + ) -> Result { + let block_hashes = self.all_chain_hashes(chain_id); + + // get canonical fork. + let canonical_fork = + self.canonical_fork(chain_id).ok_or(ExecError::BlockChainIdConsistency { chain_id })?; + + // get chain that block needs to join to. + let parent_chain = self + .chains + .get_mut(&chain_id) + .ok_or(ExecError::BlockChainIdConsistency { chain_id })?; + let chain_tip = parent_chain.tip().hash(); + + let canonical_block_hashes = self.block_indices.canonical_chain(); + + // get canonical tip + let (_, canonical_tip_hash) = + canonical_block_hashes.last_key_value().map(|(i, j)| (*i, *j)).unwrap_or_default(); + + let db = self.externals.shareable_db(); + let provider = if canonical_fork.hash == canonical_tip_hash { + ChainState::boxed(db.latest()?) + } else { + ChainState::boxed(db.history_by_block_number(canonical_fork.number)?) + }; + + // append the block if it is continuing the chain. 
+ if chain_tip == block.parent_hash { + let block_hash = block.hash(); + let block_number = block.number; + parent_chain.append_block( + block, + block_hashes, + canonical_block_hashes, + &provider, + &self.externals.consensus, + &self.externals.executor_factory, + )?; + drop(provider); + self.block_indices.insert_non_fork_block(block_number, block_hash, chain_id); + Ok(BlockStatus::Valid) + } else { + let chain = parent_chain.new_chain_fork( + block, + block_hashes, + canonical_block_hashes, + &provider, + &self.externals.consensus, + &self.externals.executor_factory, + )?; + // release the lifetime with a drop + drop(provider); + self.insert_chain(chain); + Ok(BlockStatus::Accepted) + } + } + + /// Create a new sidechain by forking the canonical chain. + // TODO(onbjerg): Is this not a specialized case of [`fork_side_chain`]? If so, can we merge? + pub fn fork_canonical_chain( + &mut self, + block: SealedBlockWithSenders, + ) -> Result { + let canonical_block_hashes = self.block_indices.canonical_chain(); + let (_, canonical_tip) = + canonical_block_hashes.last_key_value().map(|(i, j)| (*i, *j)).unwrap_or_default(); + + // create state provider + let db = self.externals.shareable_db(); + let parent_header = db + .header(&block.parent_hash)? + .ok_or(ExecError::CanonicalChain { block_hash: block.parent_hash })?; + + let block_status; + let provider = if block.parent_hash == canonical_tip { + block_status = BlockStatus::Valid; + ChainState::boxed(db.latest()?) + } else { + block_status = BlockStatus::Accepted; + ChainState::boxed(db.history_by_block_number(block.number - 1)?) 
+ }; + + let parent_header = parent_header.seal(block.parent_hash); + let chain = Chain::new_canonical_fork( + &block, + &parent_header, + canonical_block_hashes, + &provider, + &self.externals.consensus, + &self.externals.executor_factory, + )?; + drop(provider); + self.insert_chain(chain); + Ok(block_status) + } + + /// Get all block hashes from a sidechain that are not part of the canonical chain. + /// + /// This is a one time operation per block. + /// + /// # Note + /// + /// This is not cached in order to save memory. + fn all_chain_hashes(&self, chain_id: BlockChainId) -> BTreeMap { + // find chain and iterate over it, + let mut chain_id = chain_id; + let mut hashes = BTreeMap::new(); + loop { + let Some(chain) = self.chains.get(&chain_id) else { return hashes }; + hashes.extend(chain.blocks().values().map(|b| (b.number, b.hash()))); + + let fork_block = chain.fork_block_hash(); + if let Some(next_chain_id) = self.block_indices.get_blocks_chain_id(&fork_block) { + chain_id = next_chain_id; + } else { + // if there is no fork block that point to other chains, break the loop. + // it means that this fork joins to canonical block. + break + } + } + hashes + } + + /// Get the block at which the given chain forked from the current canonical chain. + /// + /// This is used to figure out what kind of state provider the executor should use to execute + /// the block. + /// + /// Returns `None` if the chain is not known. + fn canonical_fork(&self, chain_id: BlockChainId) -> Option { + let mut chain_id = chain_id; + let mut fork; + loop { + // chain fork block + fork = self.chains.get(&chain_id)?.fork_block(); + // get fork block chain + if let Some(fork_chain_id) = self.block_indices.get_blocks_chain_id(&fork.hash) { + chain_id = fork_chain_id; + continue + } + break + } + (self.block_indices.canonical_hash(&fork.number) == Some(fork.hash)).then_some(fork) + } + + /// Insert a chain into the tree. 
+ /// + /// Inserts a chain into the tree and builds the block indices. + fn insert_chain(&mut self, chain: Chain) -> BlockChainId { + let chain_id = self.block_chain_id_generator; + self.block_chain_id_generator += 1; + self.block_indices.insert_chain(chain_id, &chain); + // add chain_id -> chain index + self.chains.insert(chain_id, chain); + chain_id + } + + /// Insert a new block in the tree. + /// + /// # Note + /// + /// This recovers transaction signers (unlike [`BlockchainTree::insert_block_with_senders`]). + pub fn insert_block(&mut self, block: SealedBlock) -> Result { + let block = block.seal_with_senders().ok_or(ExecError::SenderRecoveryError)?; + self.insert_block_with_senders(&block) + } + + /// Insert a block (with senders recovered) in the tree. + /// + /// Returns `true` if: + /// + /// - The block is already part of a sidechain in the tree, or + /// - The block is already part of the canonical chain, or + /// - The parent is part of a sidechain in the tree, and we can fork at this block, or + /// - The parent is part of the canonical chain, and we can fork at this block + /// + /// Otherwise `false` is returned, indicating that neither the block nor its parent is part of + /// the chain or any sidechains. + /// + /// This means that if the block becomes canonical, we need to fetch the missing blocks over + /// P2P. + /// + /// # Note + /// + /// If the senders have not already been recovered, call [`BlockchainTree::insert_block`] + /// instead. + pub fn insert_block_with_senders( + &mut self, + block: &SealedBlockWithSenders, + ) -> Result { + // check if block number is inside pending block slide + let last_finalized_block = self.block_indices.last_finalized_block(); + if block.number <= last_finalized_block { + return Err(ExecError::PendingBlockIsFinalized { + block_number: block.number, + block_hash: block.hash(), + last_finalized: last_finalized_block, + } + .into()) + } + + // we will not even try to insert blocks that are too far in future. 
+ if block.number > last_finalized_block + self.config.max_blocks_in_chain() { + return Err(ExecError::PendingBlockIsInFuture { + block_number: block.number, + block_hash: block.hash(), + last_finalized: last_finalized_block, + } + .into()) + } + + // check if block known and is already inside Tree + if let Some(chain_id) = self.block_indices.get_blocks_chain_id(&block.hash()) { + let canonical_fork = self.canonical_fork(chain_id).expect("Chain id is valid"); + // if block chain extends canonical chain + if canonical_fork == self.block_indices.canonical_tip() { + return Ok(BlockStatus::Valid) + } else { + return Ok(BlockStatus::Accepted) + } + } + + // check if block is part of canonical chain + if self.block_indices.canonical_hash(&block.number) == Some(block.hash()) { + // block is part of canonical chain + return Ok(BlockStatus::Valid) + } + + // check if block parent can be found in Tree + if let Some(parent_chain) = self.block_indices.get_blocks_chain_id(&block.parent_hash) { + return self.fork_side_chain(block.clone(), parent_chain) + // TODO save pending block to database + // https://github.com/paradigmxyz/reth/issues/1713 + } + + // if not found, check if the parent can be found inside canonical chain. + if Some(block.parent_hash) == self.block_indices.canonical_hash(&(block.number - 1)) { + // create new chain that points to that block + return self.fork_canonical_chain(block.clone()) + // TODO save pending block to database + // https://github.com/paradigmxyz/reth/issues/1713 + } + // NOTE: Block doesn't have a parent, and if we receive this block in `make_canonical` + // function this could be a trigger to initiate p2p syncing, as we are missing the + // parent. + Ok(BlockStatus::Disconnected) + } + + /// Finalize blocks up until and including `finalized_block`, and remove them from the tree. 
+ pub fn finalize_block(&mut self, finalized_block: BlockNumber) { + let mut remove_chains = self.block_indices.finalize_canonical_blocks( + finalized_block, + self.config.num_of_additional_canonical_block_hashes(), + ); + + while let Some(chain_id) = remove_chains.pop_first() { + if let Some(chain) = self.chains.remove(&chain_id) { + remove_chains.extend(self.block_indices.remove_chain(&chain)); + } + } + } + + /// Reads the last `N` canonical hashes from the database and updates the block indices of the + /// tree. + /// + /// `N` is the `max_reorg_depth` plus the number of block hashes needed to satisfy the + /// `BLOCKHASH` opcode in the EVM. + /// + /// # Note + /// + /// This finalizes `last_finalized_block` prior to reading the canonical hashes (using + /// [`BlockchainTree::finalize_block`]). + pub fn restore_canonical_hashes( + &mut self, + last_finalized_block: BlockNumber, + ) -> Result<(), Error> { + self.finalize_block(last_finalized_block); + + let num_of_canonical_hashes = + self.config.max_reorg_depth() + self.config.num_of_additional_canonical_block_hashes(); + + let last_canonical_hashes = self + .externals + .db + .tx()? + .cursor_read::()? + .walk_back(None)? + .take(num_of_canonical_hashes as usize) + .collect::, _>>()?; + + let mut remove_chains = self.block_indices.update_block_hashes(last_canonical_hashes); + + // remove all chains that got discarded + while let Some(chain_id) = remove_chains.first() { + if let Some(chain) = self.chains.remove(chain_id) { + remove_chains.extend(self.block_indices.remove_chain(&chain)); + } + } + + Ok(()) + } + + /// Split a sidechain at the given point, and return the canonical part of it. + /// + /// The pending part of the chain is reinserted into the tree with the same `chain_id`. 
+ fn split_chain(&mut self, chain_id: BlockChainId, chain: Chain, split_at: SplitAt) -> Chain { + match chain.split(split_at) { + ChainSplit::Split { canonical, pending } => { + // rest of splited chain is inserted back with same chain_id. + self.block_indices.insert_chain(chain_id, &pending); + self.chains.insert(chain_id, pending); + canonical + } + ChainSplit::NoSplitCanonical(canonical) => canonical, + ChainSplit::NoSplitPending(_) => { + panic!("Should not happen as block indices guarantee structure of blocks") + } + } + } + + /// Make a block and its parent part of the canonical chain. + /// + /// # Note + /// + /// This unwinds the database if necessary, i.e. if parts of the canonical chain have been + /// re-orged. + /// + /// # Returns + /// + /// Returns `Ok` if the blocks were canonicalized, or if the blocks were already canonical. + pub fn make_canonical(&mut self, block_hash: &BlockHash) -> Result<(), Error> { + let chain_id = if let Some(chain_id) = self.block_indices.get_blocks_chain_id(block_hash) { + chain_id + } else { + // If block is already canonical don't return error. + if self.block_indices.is_block_hash_canonical(block_hash) { + return Ok(()) + } + return Err(ExecError::BlockHashNotFoundInChain { block_hash: *block_hash }.into()) + }; + let chain = self.chains.remove(&chain_id).expect("To be present"); + + // we are spliting chain as there is possibility that only part of chain get canonicalized. + let canonical = self.split_chain(chain_id, chain, SplitAt::Hash(*block_hash)); + + let mut block_fork = canonical.fork_block(); + let mut block_fork_number = canonical.fork_block_number(); + let mut chains_to_promote = vec![canonical]; + + // loop while fork blocks are found in Tree. 
+ while let Some(chain_id) = self.block_indices.get_blocks_chain_id(&block_fork.hash) { + let chain = self.chains.remove(&chain_id).expect("To fork to be present"); + block_fork = chain.fork_block(); + let canonical = self.split_chain(chain_id, chain, SplitAt::Number(block_fork_number)); + block_fork_number = canonical.fork_block_number(); + chains_to_promote.push(canonical); + } + + let old_tip = self.block_indices.canonical_tip(); + // Merge all chain into one chain. + let mut new_canon_chain = chains_to_promote.pop().expect("There is at least one block"); + for chain in chains_to_promote.into_iter().rev() { + new_canon_chain.append_chain(chain).expect("We have just build the chain."); + } + + // update canonical index + self.block_indices.canonicalize_blocks(new_canon_chain.blocks()); + + // if joins to the tip + if new_canon_chain.fork_block_hash() == old_tip.hash { + // append to database + self.commit_canonical(new_canon_chain)?; + } else { + // it forks to canonical block that is not the tip. + + let canon_fork = new_canon_chain.fork_block(); + // sanity check + if self.block_indices.canonical_hash(&canon_fork.number) != Some(canon_fork.hash) { + unreachable!("all chains should point to canonical chain."); + } + + let old_canon_chain = self.revert_canonical(canon_fork.number)?; + // commit new canonical chain. + self.commit_canonical(new_canon_chain)?; + // insert old canon chain + self.insert_chain(old_canon_chain); + } + + Ok(()) + } + + /// Canonicalize the given chain and commit it to the database. 
+ fn commit_canonical(&mut self, chain: Chain) -> Result<(), Error> { + let mut tx = Transaction::new(&self.externals.db)?; + let new_tip_number = chain.tip().number; + let new_tip_hash = chain.tip().hash; + let first_transition_id = + tx.get_block_transition(chain.first().number.saturating_sub(1)) + .map_err(|e| ExecError::CanonicalCommit { inner: e.to_string() })?; + let expected_state_root = chain.tip().state_root; + let fork_block = chain.fork_block_number(); + let (blocks, state) = chain.into_inner(); + let num_transitions = state.transitions_count(); + + // Write state and changesets to the database + state + .write_to_db(tx.deref_mut(), first_transition_id) + .map_err(|e| ExecError::CanonicalCommit { inner: e.to_string() })?; + + // Insert the blocks + for block in blocks.into_values() { + tx.insert_block(block) + .map_err(|e| ExecError::CanonicalCommit { inner: e.to_string() })?; + } + tx.insert_hashes( + fork_block, + first_transition_id, + first_transition_id + num_transitions as u64, + new_tip_number, + new_tip_hash, + expected_state_root, + ) + .map_err(|e| ExecError::CanonicalCommit { inner: e.to_string() })?; + + // Update pipeline progress + tx.update_pipeline_stages(new_tip_number) + .map_err(|e| ExecError::PipelineStatusUpdate { inner: e.to_string() })?; + + tx.commit()?; + + Ok(()) + } + + /// Unwind tables and put it inside state + pub fn unwind(&mut self, unwind_to: BlockNumber) -> Result<(), Error> { + // nothing to be done if unwind_to is higher then the tip + if self.block_indices.canonical_tip().number <= unwind_to { + return Ok(()) + } + // revert `N` blocks from current canonical chain and put them inside BlockchanTree + let old_canon_chain = self.revert_canonical(unwind_to)?; + + // check if there is block in chain + if old_canon_chain.blocks().is_empty() { + return Ok(()) + } + self.block_indices.unwind_canonical_chain(unwind_to); + // insert old canonical chain to BlockchainTree. 
+ self.insert_chain(old_canon_chain); + + Ok(()) + } + + /// Revert canonical blocks from the database and return them. + /// + /// The block, `revert_until`, is non-inclusive, i.e. `revert_until` stays in the database. + fn revert_canonical(&mut self, revert_until: BlockNumber) -> Result { + // read data that is needed for new sidechain + + let mut tx = Transaction::new(&self.externals.db)?; + + // read block and execution result from database. and remove traces of block from tables. + let blocks_and_execution = tx + .take_block_and_execution_range( + self.externals.chain_spec.as_ref(), + (revert_until + 1).., + ) + .map_err(|e| ExecError::CanonicalRevert { inner: e.to_string() })?; + + // update pipeline progress. + tx.update_pipeline_stages(revert_until) + .map_err(|e| ExecError::PipelineStatusUpdate { inner: e.to_string() })?; + + tx.commit()?; + + Ok(Chain::new(blocks_and_execution)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::TestExecutorFactory; + use reth_db::{ + mdbx::{test_utils::create_test_rw_db, Env, WriteMap}, + transaction::DbTxMut, + }; + use reth_interfaces::test_utils::TestConsensus; + use reth_primitives::{proofs::EMPTY_ROOT, ChainSpecBuilder, H256, MAINNET}; + use reth_provider::{ + insert_block, post_state::PostState, test_utils::blocks::BlockChainTestData, + }; + use std::{collections::HashSet, sync::Arc}; + + fn setup_externals( + exec_res: Vec, + ) -> TreeExternals, Arc, TestExecutorFactory> { + let db = create_test_rw_db(); + let consensus = Arc::new(TestConsensus::default()); + let chain_spec = Arc::new( + ChainSpecBuilder::default() + .chain(MAINNET.chain) + .genesis(MAINNET.genesis.clone()) + .shanghai_activated() + .build(), + ); + let executor_factory = TestExecutorFactory::new(chain_spec.clone()); + executor_factory.extend(exec_res); + + TreeExternals::new(db, consensus, executor_factory, chain_spec) + } + + fn setup_genesis(db: DB, mut genesis: SealedBlock) { + // insert genesis to db. 
+ + genesis.header.header.number = 10; + genesis.header.header.state_root = EMPTY_ROOT; + let tx_mut = db.tx_mut().unwrap(); + + insert_block(&tx_mut, genesis, None, false, Some((0, 0))).unwrap(); + + // insert first 10 blocks + for i in 0..10 { + tx_mut.put::(i, H256([100 + i as u8; 32])).unwrap(); + } + tx_mut.commit().unwrap(); + } + + /// Test data structure that will check tree internals + #[derive(Default, Debug)] + struct TreeTester { + /// Number of chains + chain_num: Option, + /// Check block to chain index + block_to_chain: Option>, + /// Check fork to child index + fork_to_child: Option>>, + } + + impl TreeTester { + fn with_chain_num(mut self, chain_num: usize) -> Self { + self.chain_num = Some(chain_num); + self + } + fn with_block_to_chain(mut self, block_to_chain: HashMap) -> Self { + self.block_to_chain = Some(block_to_chain); + self + } + fn with_fork_to_child( + mut self, + fork_to_child: HashMap>, + ) -> Self { + self.fork_to_child = Some(fork_to_child); + self + } + + fn assert( + self, + tree: &BlockchainTree, + ) { + if let Some(chain_num) = self.chain_num { + assert_eq!(tree.chains.len(), chain_num); + } + if let Some(block_to_chain) = self.block_to_chain { + assert_eq!(*tree.block_indices.blocks_to_chain(), block_to_chain); + } + if let Some(fork_to_child) = self.fork_to_child { + assert_eq!(*tree.block_indices.fork_to_child(), fork_to_child); + } + } + } + + #[test] + fn sanity_path() { + let data = BlockChainTestData::default(); + let (mut block1, exec1) = data.blocks[0].clone(); + block1.number = 11; + let (mut block2, exec2) = data.blocks[1].clone(); + block2.number = 12; + + // test pops execution results from vector, so order is from last to first.ß + let externals = setup_externals(vec![exec2.clone(), exec1.clone(), exec2, exec1]); + + // last finalized block would be number 9. 
+ setup_genesis(externals.db.clone(), data.genesis); + + // make tree + let config = BlockchainTreeConfig::new(1, 2, 3); + let mut tree = BlockchainTree::new(externals, config).expect("failed to create tree"); + + // genesis block 10 is already canonical + assert_eq!(tree.make_canonical(&H256::zero()), Ok(())); + + // insert block2 hits max chain size + assert_eq!( + tree.insert_block_with_senders(&block2), + Err(ExecError::PendingBlockIsInFuture { + block_number: block2.number, + block_hash: block2.hash(), + last_finalized: 9, + } + .into()) + ); + + // make genesis block 10 as finalized + tree.finalize_block(10); + + // block 2 parent is not known. + assert_eq!(tree.insert_block_with_senders(&block2), Ok(BlockStatus::Disconnected)); + + // insert block1 + assert_eq!(tree.insert_block_with_senders(&block1), Ok(BlockStatus::Valid)); + // already inserted block will return true. + assert_eq!(tree.insert_block_with_senders(&block1), Ok(BlockStatus::Valid)); + + // insert block2 + assert_eq!(tree.insert_block_with_senders(&block2), Ok(BlockStatus::Valid)); + + // Trie state: + // b2 (pending block) + // | + // | + // b1 (pending block) + // / + // / + // g1 (canonical blocks) + // | + TreeTester::default() + .with_chain_num(1) + .with_block_to_chain(HashMap::from([(block1.hash, 0), (block2.hash, 0)])) + .with_fork_to_child(HashMap::from([(block1.parent_hash, HashSet::from([block1.hash]))])) + .assert(&tree); + + // make block1 canonical + assert_eq!(tree.make_canonical(&block1.hash()), Ok(())); + // make block2 canonical + assert_eq!(tree.make_canonical(&block2.hash()), Ok(())); + + // Trie state: + // b2 (canonical block) + // | + // | + // b1 (canonical block) + // | + // | + // g1 (canonical blocks) + // | + TreeTester::default() + .with_chain_num(0) + .with_block_to_chain(HashMap::from([])) + .with_fork_to_child(HashMap::from([])) + .assert(&tree); + + let mut block1a = block1.clone(); + let block1a_hash = H256([0x33; 32]); + block1a.hash = block1a_hash; + let mut 
block2a = block2.clone(); + let block2a_hash = H256([0x34; 32]); + block2a.hash = block2a_hash; + + // reinsert two blocks that point to canonical chain + assert_eq!(tree.insert_block_with_senders(&block1a), Ok(BlockStatus::Accepted)); + + TreeTester::default() + .with_chain_num(1) + .with_block_to_chain(HashMap::from([(block1a_hash, 1)])) + .with_fork_to_child(HashMap::from([( + block1.parent_hash, + HashSet::from([block1a_hash]), + )])) + .assert(&tree); + + assert_eq!(tree.insert_block_with_senders(&block2a), Ok(BlockStatus::Accepted)); + // Trie state: + // b2 b2a (side chain) + // | / + // | / + // b1 b1a (side chain) + // | / + // |/ + // g1 (10) + // | + TreeTester::default() + .with_chain_num(2) + .with_block_to_chain(HashMap::from([(block1a_hash, 1), (block2a_hash, 2)])) + .with_fork_to_child(HashMap::from([ + (block1.parent_hash, HashSet::from([block1a_hash])), + (block1.hash(), HashSet::from([block2a_hash])), + ])) + .assert(&tree); + + // make b2a canonical + assert_eq!(tree.make_canonical(&block2a_hash), Ok(())); + // Trie state: + // b2a b2 (side chain) + // | / + // | / + // b1 b1a (side chain) + // | / + // |/ + // g1 (10) + // | + TreeTester::default() + .with_chain_num(2) + .with_block_to_chain(HashMap::from([(block1a_hash, 1), (block2.hash, 3)])) + .with_fork_to_child(HashMap::from([ + (block1.parent_hash, HashSet::from([block1a_hash])), + (block1.hash(), HashSet::from([block2.hash])), + ])) + .assert(&tree); + + assert_eq!(tree.make_canonical(&block1a_hash), Ok(())); + // Trie state: + // b2a b2 (side chain) + // | / + // | / + // b1a b1 (side chain) + // | / + // |/ + // g1 (10) + // | + TreeTester::default() + .with_chain_num(2) + .with_block_to_chain(HashMap::from([ + (block1.hash, 4), + (block2a_hash, 4), + (block2.hash, 3), + ])) + .with_fork_to_child(HashMap::from([ + (block1.parent_hash, HashSet::from([block1.hash])), + (block1.hash(), HashSet::from([block2.hash])), + ])) + .assert(&tree); + + // make b2 canonical + 
assert_eq!(tree.make_canonical(&block2.hash()), Ok(())); + // Trie state: + // b2 b2a (side chain) + // | / + // | / + // b1 b1a (side chain) + // | / + // |/ + // g1 (10) + // | + TreeTester::default() + .with_chain_num(2) + .with_block_to_chain(HashMap::from([(block1a_hash, 5), (block2a_hash, 4)])) + .with_fork_to_child(HashMap::from([ + (block1.parent_hash, HashSet::from([block1a_hash])), + (block1.hash(), HashSet::from([block2a_hash])), + ])) + .assert(&tree); + + // finalize b1 that would make b1a removed from tree + tree.finalize_block(11); + // Trie state: + // b2 b2a (side chain) + // | / + // | / + // b1 (canon) + // | + // g1 (10) + // | + TreeTester::default() + .with_chain_num(1) + .with_block_to_chain(HashMap::from([(block2a_hash, 4)])) + .with_fork_to_child(HashMap::from([(block1.hash(), HashSet::from([block2a_hash]))])) + .assert(&tree); + + // unwind canonical + assert_eq!(tree.unwind(block1.number), Ok(())); + // Trie state: + // b2 b2a (pending block) + // / / + // / / + // / / + // b1 (canonical block) + // | + // | + // g1 (canonical blocks) + // | + TreeTester::default() + .with_chain_num(2) + .with_block_to_chain(HashMap::from([(block2a_hash, 4), (block2.hash, 6)])) + .with_fork_to_child(HashMap::from([( + block1.hash(), + HashSet::from([block2a_hash, block2.hash]), + )])) + .assert(&tree); + + // commit b2a + assert_eq!(tree.make_canonical(&block2.hash), Ok(())); + // Trie state: + // b2 b2a (side chain) + // | / + // | / + // b1 (canon) + // | + // g1 (10) + // | + TreeTester::default() + .with_chain_num(1) + .with_block_to_chain(HashMap::from([(block2a_hash, 4)])) + .with_fork_to_child(HashMap::from([(block1.hash(), HashSet::from([block2a_hash]))])) + .assert(&tree); + + // update canonical block to b2, this would make b2a be removed + assert_eq!(tree.restore_canonical_hashes(12), Ok(())); + // Trie state: + // b2 (canon) + // | + // b1 (canon) + // | + // g1 (10) + // | + TreeTester::default() + .with_chain_num(0) + 
.with_block_to_chain(HashMap::from([])) + .with_fork_to_child(HashMap::from([])) + .assert(&tree); + } +} diff --git a/crates/executor/src/execution_result.rs b/crates/executor/src/execution_result.rs deleted file mode 100644 index 3f1b433facc..00000000000 --- a/crates/executor/src/execution_result.rs +++ /dev/null @@ -1,171 +0,0 @@ -use reth_db::{models::AccountBeforeTx, tables, transaction::DbTxMut, Error as DbError}; -use reth_primitives::{Account, Address, Receipt, H256, U256}; -use revm::primitives::Bytecode; -use std::collections::BTreeMap; - -/// Execution Result containing vector of transaction changesets -/// and block reward if present -#[derive(Debug)] -pub struct ExecutionResult { - /// Transaction changeset containing [Receipt], changed [Accounts][Account] and Storages. - pub tx_changesets: Vec, - /// Post block account changesets. This might include block reward, uncle rewards, withdrawals - /// or irregular state changes (DAO fork). - pub block_changesets: BTreeMap, -} - -/// After transaction is executed this structure contain -/// transaction [Receipt] every change to state ([Account], Storage, [Bytecode]) -/// that this transaction made and its old values -/// so that history account table can be updated. -#[derive(Debug, Clone)] -pub struct TransactionChangeSet { - /// Transaction receipt - pub receipt: Receipt, - /// State change that this transaction made on state. - pub changeset: BTreeMap, - /// new bytecode created as result of transaction execution. - pub new_bytecodes: BTreeMap, -} - -/// Contains old/new account changes -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum AccountInfoChangeSet { - /// The account is newly created. - Created { - /// The newly created account. - new: Account, - }, - /// An account was deleted (selfdestructed) or we have touched - /// an empty account and we need to remove/destroy it. 
- /// (Look at state clearing [EIP-158](https://eips.ethereum.org/EIPS/eip-158)) - Destroyed { - /// The account that was destroyed. - old: Account, - }, - /// The account was changed. - Changed { - /// The account after the change. - new: Account, - /// The account prior to the change. - old: Account, - }, - /// Nothing was changed for the account (nonce/balance). - NoChange, -} - -impl AccountInfoChangeSet { - /// Apply the changes from the changeset to a database transaction. - pub fn apply_to_db<'a, TX: DbTxMut<'a>>( - self, - tx: &TX, - address: Address, - tx_index: u64, - has_state_clear_eip: bool, - ) -> Result<(), DbError> { - match self { - AccountInfoChangeSet::Changed { old, new } => { - // insert old account in AccountChangeSet - // check for old != new was already done - tx.put::( - tx_index, - AccountBeforeTx { address, info: Some(old) }, - )?; - tx.put::(address, new)?; - } - AccountInfoChangeSet::Created { new } => { - // Ignore account that are created empty and state clear (SpuriousDragon) hardfork - // is activated. - if has_state_clear_eip && new.is_empty() { - return Ok(()) - } - tx.put::( - tx_index, - AccountBeforeTx { address, info: None }, - )?; - tx.put::(address, new)?; - } - AccountInfoChangeSet::Destroyed { old } => { - tx.delete::(address, None)?; - tx.put::( - tx_index, - AccountBeforeTx { address, info: Some(old) }, - )?; - } - AccountInfoChangeSet::NoChange => { - // do nothing storage account didn't change - } - } - Ok(()) - } -} - -/// Diff change set that is needed for creating history index and updating current world state. -#[derive(Debug, Clone)] -pub struct AccountChangeSet { - /// Old and New account account change. - pub account: AccountInfoChangeSet, - /// Storage containing key -> (OldValue,NewValue). in case that old value is not existing - /// we can expect to have U256::ZERO, same with new value. 
- pub storage: BTreeMap, - /// Just to make sure that we are taking selfdestruct cleaning we have this field that wipes - /// storage. There are instances where storage is changed but account is not touched, so we - /// can't take into account that if new account is None that it is selfdestruct. - pub wipe_storage: bool, -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use reth_db::{ - database::Database, - mdbx::{test_utils, Env, EnvKind, WriteMap}, - transaction::DbTx, - }; - use reth_primitives::H160; - - use super::*; - - #[test] - fn apply_account_info_changeset() { - let db: Arc> = test_utils::create_test_db(EnvKind::RW); - let address = H160::zero(); - let tx_num = 0; - let acc1 = Account { balance: U256::from(1), nonce: 2, bytecode_hash: Some(H256::zero()) }; - let acc2 = Account { balance: U256::from(3), nonce: 4, bytecode_hash: Some(H256::zero()) }; - - let tx = db.tx_mut().unwrap(); - - // check Changed changeset - AccountInfoChangeSet::Changed { new: acc1, old: acc2 } - .apply_to_db(&tx, address, tx_num, true) - .unwrap(); - assert_eq!( - tx.get::(tx_num), - Ok(Some(AccountBeforeTx { address, info: Some(acc2) })) - ); - assert_eq!(tx.get::(address), Ok(Some(acc1))); - - AccountInfoChangeSet::Created { new: acc1 } - .apply_to_db(&tx, address, tx_num, true) - .unwrap(); - assert_eq!( - tx.get::(tx_num), - Ok(Some(AccountBeforeTx { address, info: None })) - ); - assert_eq!(tx.get::(address), Ok(Some(acc1))); - - // delete old value, as it is dupsorted - tx.delete::(tx_num, None).unwrap(); - - AccountInfoChangeSet::Destroyed { old: acc2 } - .apply_to_db(&tx, address, tx_num, true) - .unwrap(); - assert_eq!(tx.get::(address), Ok(None)); - assert_eq!( - tx.get::(tx_num), - Ok(Some(AccountBeforeTx { address, info: Some(acc2) })) - ); - } -} diff --git a/crates/executor/src/executor.rs b/crates/executor/src/executor.rs index 24125d5551e..2e56a72090c 100644 --- a/crates/executor/src/executor.rs +++ b/crates/executor/src/executor.rs @@ -1,49 +1,73 @@ 
-use crate::execution_result::{ - AccountChangeSet, AccountInfoChangeSet, ExecutionResult, TransactionChangeSet, -}; -use hashbrown::hash_map::Entry; -use reth_interfaces::executor::{BlockExecutor, Error}; +use crate::post_state::PostState; +use reth_interfaces::executor::Error; use reth_primitives::{ - bloom::logs_bloom, Account, Address, Block, Bloom, ChainSpec, Hardfork, Header, Log, Receipt, - TransactionSigned, H256, U256, + bloom::logs_bloom, Account, Address, Block, Bloom, Bytecode, ChainSpec, Hardfork, Header, Log, + Receipt, TransactionSigned, H256, U256, }; -use reth_provider::StateProvider; +use reth_provider::{BlockExecutor, StateProvider}; use reth_revm::{ config::{WEI_2ETH, WEI_3ETH, WEI_5ETH}, database::SubState, env::{fill_cfg_and_block_env, fill_tx_env}, into_reth_log, to_reth_acc, }; +use reth_revm_inspectors::stack::{InspectorStack, InspectorStackConfig}; use revm::{ db::AccountState, - primitives::{Account as RevmAccount, AccountInfo, Bytecode, ResultAndState}, + primitives::{ + hash_map::{self, Entry}, + Account as RevmAccount, AccountInfo, ResultAndState, + }, EVM, }; -use std::collections::{BTreeMap, HashMap}; +use std::{ + collections::{BTreeMap, HashMap}, + sync::Arc, +}; /// Main block executor -pub struct Executor<'a, DB> +pub struct Executor +where + DB: StateProvider, +{ + /// The configured chain-spec + pub chain_spec: Arc, + evm: EVM>, + stack: InspectorStack, +} + +impl From> for Executor where DB: StateProvider, { - chain_spec: &'a ChainSpec, - evm: EVM<&'a mut SubState>, - /// Enable revm inspector printer. - /// In execution this will print opcode level traces directly to console. - pub use_printer_tracer: bool, + /// Instantiates a new executor from the chainspec. Must call + /// `with_db` to set the database before executing. 
+ fn from(chain_spec: Arc) -> Self { + let evm = EVM::new(); + Executor { chain_spec, evm, stack: InspectorStack::new(InspectorStackConfig::default()) } + } } -impl<'a, DB> Executor<'a, DB> +impl Executor where DB: StateProvider, { - fn new(chain_spec: &'a ChainSpec, db: &'a mut SubState) -> Self { + /// Creates a new executor from the given chain spec and database. + pub fn new(chain_spec: Arc, db: SubState) -> Self { let mut evm = EVM::new(); evm.database(db); - Executor { chain_spec, evm, use_printer_tracer: false } + + Executor { chain_spec, evm, stack: InspectorStack::new(InspectorStackConfig::default()) } } - fn db(&mut self) -> &mut SubState { + /// Configures the executor with the given inspectors. + pub fn with_stack(mut self, stack: InspectorStack) -> Self { + self.stack = stack; + self + } + + /// Gives a reference to the database + pub fn db(&mut self) -> &mut SubState { self.evm.db().expect("db to not be moved") } @@ -65,25 +89,25 @@ where /// Initializes the config and block env. fn init_env(&mut self, header: &Header, total_difficulty: U256) { - fill_cfg_and_block_env(&mut self.evm.env, self.chain_spec, header, total_difficulty); + fill_cfg_and_block_env( + &mut self.evm.env.cfg, + &mut self.evm.env.block, + &self.chain_spec, + header, + total_difficulty, + ); } - /// Commit change to database and return change diff that is used to update state and create - /// history index - /// - /// ChangeDiff consists of: - /// address->AccountChangeSet (It contains old and new account info,storage wipe flag, and - /// old/new storage) bytecode_hash->bytecodes mapping - /// - /// BTreeMap is used to have sorted values + /// Commit change to the run-time database, and update the given [PostState] with the changes + /// made in the transaction, which can be persisted to the database. 
fn commit_changes( &mut self, - changes: hashbrown::HashMap, - ) -> (BTreeMap, BTreeMap) { + changes: hash_map::HashMap, + has_state_clear_eip: bool, + post_state: &mut PostState, + ) { let db = self.db(); - let mut change = BTreeMap::new(); - let mut new_bytecodes = BTreeMap::new(); // iterate over all changed accounts for (address, account) in changes { if account.is_destroyed { @@ -96,16 +120,8 @@ where }; // Insert into `change` a old account and None for new account // and mark storage to be mapped - change.insert( - address, - AccountChangeSet { - account: AccountInfoChangeSet::Destroyed { - old: to_reth_acc(&db_account.info), - }, - storage: BTreeMap::new(), - wipe_storage: true, - }, - ); + post_state.destroy_account(address, to_reth_acc(&db_account.info)); + // clear cached DB and mark account as not existing db_account.storage.clear(); db_account.account_state = AccountState::NotExisting; @@ -117,83 +133,84 @@ where // does it exist inside cached contracts if it doesn't it is new bytecode that // we are inserting inside `change` if let Some(ref code) = account.info.code { - if !code.is_empty() { - match db.contracts.entry(account.info.code_hash) { - Entry::Vacant(entry) => { - entry.insert(code.clone()); - new_bytecodes.insert(H256(account.info.code_hash.0), code.clone()); - } - Entry::Occupied(mut entry) => { - entry.insert(code.clone()); - } - } + if !code.is_empty() && !db.contracts.contains_key(&account.info.code_hash) { + db.contracts.insert(account.info.code_hash, code.clone()); + post_state.add_bytecode(account.info.code_hash, Bytecode(code.clone())); } } // get old account that is going to be overwritten or none if it does not exist // and get new account that was just inserted. 
new account mut ref is used for // inserting storage - let (account_info_changeset, new_account) = match db.accounts.entry(address) { + let cached_account = match db.accounts.entry(address) { Entry::Vacant(entry) => { let entry = entry.insert(Default::default()); entry.info = account.info.clone(); - // account was not existing, so this means new account is created - (AccountInfoChangeSet::Created { new: to_reth_acc(&entry.info) }, entry) + + let account = to_reth_acc(&entry.info); + if !(has_state_clear_eip && account.is_empty()) { + post_state.create_account(address, account); + } + entry } Entry::Occupied(entry) => { let entry = entry.into_mut(); - // account is present inside cache but is marked as NotExisting. - let account_changeset = - if matches!(entry.account_state, AccountState::NotExisting) { - AccountInfoChangeSet::Created { new: to_reth_acc(&account.info) } - } else if entry.info != account.info { - AccountInfoChangeSet::Changed { - old: to_reth_acc(&entry.info), - new: to_reth_acc(&account.info), - } - } else { - AccountInfoChangeSet::NoChange - }; + if matches!(entry.account_state, AccountState::NotExisting) { + let account = to_reth_acc(&account.info); + if !(has_state_clear_eip && account.is_empty()) { + post_state.create_account(address, account); + } + } else if entry.info != account.info { + post_state.change_account( + address, + to_reth_acc(&entry.info), + to_reth_acc(&account.info), + ); + } else if has_state_clear_eip && account.is_empty() { + // The account was touched, but it is empty, so it should be deleted. 
+ post_state.destroy_account(address, to_reth_acc(&account.info)); + } + entry.info = account.info.clone(); - (account_changeset, entry) + entry } }; - new_account.account_state = if account.storage_cleared { - new_account.storage.clear(); + cached_account.account_state = if account.storage_cleared { + cached_account.storage.clear(); + AccountState::StorageCleared + } else if cached_account.account_state.is_storage_cleared() { + // the account already exists and its storage was cleared, preserve its previous + // state AccountState::StorageCleared } else { AccountState::Touched }; // Insert storage. - let mut storage = BTreeMap::new(); + let mut storage_changeset = BTreeMap::new(); // insert storage into new db account. - new_account.storage.extend(account.storage.into_iter().map(|(key, value)| { - storage.insert(key, (value.original_value(), value.present_value())); + cached_account.storage.extend(account.storage.into_iter().map(|(key, value)| { + storage_changeset.insert(key, (value.original_value(), value.present_value())); (key, value.present_value()) })); // Insert into change. - change.insert( - address, - AccountChangeSet { - account: account_info_changeset, - storage, - wipe_storage: false, - }, - ); + if !storage_changeset.is_empty() { + post_state.change_storage(address, storage_changeset); + } } } - (change, new_bytecodes) } - /// Collect all balance changes at the end of the block. Balance changes might include block - /// reward, uncle rewards, withdrawals or irregular state changes (DAO fork). + /// Collect all balance changes at the end of the block. + /// + /// Balance changes might include the block reward, uncle rewards, withdrawals, or irregular + /// state changes (DAO fork). 
fn post_block_balance_increments( - &mut self, + &self, block: &Block, td: U256, ) -> Result, Error> { @@ -255,48 +272,36 @@ where } /// Irregular state change at Ethereum DAO hardfork - fn dao_fork_changeset(&mut self) -> Result, Error> { + fn apply_dao_fork_changes(&mut self, post_state: &mut PostState) -> Result<(), Error> { let db = self.db(); let mut drained_balance = U256::ZERO; // drain all accounts ether - let mut changesets = crate::eth_dao_fork::DAO_HARDKFORK_ACCOUNTS - .iter() - .map(|&address| { - let db_account = db.load_account(address).map_err(|_| Error::ProviderError)?; - let old = to_reth_acc(&db_account.info); - // drain balance - drained_balance += core::mem::take(&mut db_account.info.balance); - let new = to_reth_acc(&db_account.info); - // assume it is changeset as it is irregular state change - Ok((address, AccountInfoChangeSet::Changed { new, old })) - }) - .collect::, _>>()?; + for address in crate::eth_dao_fork::DAO_HARDKFORK_ACCOUNTS { + let db_account = db.load_account(address).map_err(|_| Error::ProviderError)?; + let old = to_reth_acc(&db_account.info); + // drain balance + drained_balance += core::mem::take(&mut db_account.info.balance); + let new = to_reth_acc(&db_account.info); + // assume it is changeset as it is irregular state change + post_state.change_account(address, old, new); + } // add drained ether to beneficiary. 
let beneficiary = crate::eth_dao_fork::DAO_HARDFORK_BENEFICIARY; + self.increment_account_balance(beneficiary, drained_balance, post_state)?; - let beneficiary_db_account = - db.load_account(beneficiary).map_err(|_| Error::ProviderError)?; - let old = to_reth_acc(&beneficiary_db_account.info); - beneficiary_db_account.info.balance += drained_balance; - let new = to_reth_acc(&beneficiary_db_account.info); - - let beneficiary_changeset = AccountInfoChangeSet::Changed { new, old }; - - // insert changeset - changesets.insert(beneficiary, beneficiary_changeset); - - Ok(changesets) + Ok(()) } - /// Generate balance increment account changeset and mutate account database entry in place. - fn account_balance_increment_changeset( + /// Increment the balance for the given account in the [PostState]. + fn increment_account_balance( &mut self, address: Address, increment: U256, - ) -> Result { + post_state: &mut PostState, + ) -> Result<(), Error> { let db = self.db(); let beneficiary = db.load_account(address).map_err(|_| Error::ProviderError)?; let old = to_reth_acc(&beneficiary.info); @@ -310,9 +315,10 @@ where beneficiary.account_state = AccountState::StorageCleared; // if account was not present append `Created` changeset - Ok(AccountInfoChangeSet::Created { - new: Account { nonce: 0, balance: new.balance, bytecode_hash: None }, - }) + post_state.create_account( + address, + Account { nonce: 0, balance: new.balance, bytecode_hash: None }, + ) } AccountState::StorageCleared | AccountState::Touched | AccountState::None => { @@ -323,30 +329,64 @@ where beneficiary.account_state = AccountState::Touched; } // if account was present, append changed changeset. 
- Ok(AccountInfoChangeSet::Changed { new, old }) + post_state.change_account(address, old, new); } } + + Ok(()) } -} -impl<'a, DB> BlockExecutor for Executor<'a, DB> -where - DB: StateProvider, -{ - fn execute( + /// Runs a single transaction in the configured environment and proceeds + /// to return the result and state diff (without applying it). + /// + /// Assumes the rest of the block environment has been filled via `init_block_env`. + pub fn transact( + &mut self, + transaction: &TransactionSigned, + sender: Address, + ) -> Result { + // Fill revm structure. + fill_tx_env(&mut self.evm.env.tx, transaction, sender); + + let hash = transaction.hash(); + let out = if self.stack.should_inspect(&self.evm.env, hash) { + // execution with inspector. + let output = self.evm.inspect(&mut self.stack); + tracing::trace!( + target: "evm", + ?hash, ?output, ?transaction, env = ?self.evm.env, + "Executed transaction" + ); + output + } else { + // main execution. + self.evm.transact() + }; + out.map_err(|e| Error::EVM { hash, message: format!("{e:?}") }) + } + + /// Runs the provided transactions and commits their state to the run-time database. + /// + /// The returned [PostState] can be used to persist the changes to disk, and contains the + /// changes made by each transaction. + /// + /// The changes in [PostState] have a transition ID associated with them: there is one + /// transition ID for each transaction (with the first executed tx having transition ID 0, and + /// so on). + /// + /// The second returned value represents the total gas used by this block of transactions. 
+ pub fn execute_transactions( &mut self, block: &Block, total_difficulty: U256, senders: Option>, - ) -> Result { + ) -> Result<(PostState, u64), Error> { let senders = self.recover_senders(&block.body, senders)?; self.init_env(&block.header, total_difficulty); let mut cumulative_gas_used = 0; - // output of execution - let mut tx_changesets = Vec::with_capacity(block.body.len()); - + let mut post_state = PostState::with_tx_capacity(block.body.len()); for (transaction, sender) in block.body.iter().zip(senders.into_iter()) { // The sum of the transaction’s gas limit, Tg, and the gas utilised in this block prior, // must be no greater than the block’s gasLimit. @@ -357,30 +397,15 @@ where block_available_gas, }) } - - // Fill revm structure. - fill_tx_env(&mut self.evm.env.tx, transaction, sender); - // Execute transaction. - let out = if self.use_printer_tracer { - // execution with inspector. - let output = self.evm.inspect(revm::inspectors::CustomPrintTracer::default()); - tracing::trace!( - target: "evm", - hash = ?transaction.hash(), ?output, ?transaction, env = ?self.evm.env, - "Executed transaction" - ); - output - } else { - // main execution. - self.evm.transact() - }; - - // cast the error and extract returnables. - let ResultAndState { result, state } = out.map_err(|e| Error::EVM(format!("{e:?}")))?; + let ResultAndState { result, state } = self.transact(transaction, sender)?; // commit changes - let (changeset, new_bytecodes) = self.commit_changes(state); + self.commit_changes( + state, + self.chain_spec.fork(Hardfork::SpuriousDragon).active_at_block(block.number), + &mut post_state, + ); // append gas used cumulative_gas_used += result.gas_used(); @@ -389,66 +414,81 @@ where let logs: Vec = result.logs().into_iter().map(into_reth_log).collect(); // Push transaction changeset and calculate header bloom filter for receipt. 
- tx_changesets.push(TransactionChangeSet { - receipt: Receipt { - tx_type: transaction.tx_type(), - // Success flag was added in `EIP-658: Embedding transaction status code in - // receipts`. - success: result.is_success(), - cumulative_gas_used, - bloom: logs_bloom(logs.iter()), - logs, - }, - changeset, - new_bytecodes, + post_state.add_receipt(Receipt { + tx_type: transaction.tx_type(), + // Success flag was added in `EIP-658: Embedding transaction status code in + // receipts`. + success: result.is_success(), + cumulative_gas_used, + bloom: logs_bloom(logs.iter()), + logs, }); + post_state.finish_transition(); } + Ok((post_state, cumulative_gas_used)) + } +} + +impl BlockExecutor for Executor +where + DB: StateProvider, +{ + fn execute( + &mut self, + block: &Block, + total_difficulty: U256, + senders: Option>, + ) -> Result { + let (mut post_state, cumulative_gas_used) = + self.execute_transactions(block, total_difficulty, senders)?; + // Check if gas used matches the value set in header. if block.gas_used != cumulative_gas_used { return Err(Error::BlockGasUsed { got: cumulative_gas_used, expected: block.gas_used }) } - let mut block_changesets = BTreeMap::default(); let balance_increments = self.post_block_balance_increments(block, total_difficulty)?; - for (address, increment) in balance_increments { - let changeset = self.account_balance_increment_changeset(address, increment)?; - block_changesets.insert(address, changeset); + let mut includes_block_transition = !balance_increments.is_empty(); + for (address, increment) in balance_increments.into_iter() { + self.increment_account_balance(address, increment, &mut post_state)?; } if self.chain_spec.fork(Hardfork::Dao).transitions_at_block(block.number) { - for (address, changeset) in self.dao_fork_changeset()? { - // No account collision between rewarded accounts and DAO fork related accounts. 
- block_changesets.insert(address, changeset); - } + includes_block_transition = true; + self.apply_dao_fork_changes(&mut post_state)?; + } + + if includes_block_transition { + post_state.finish_transition(); } - Ok(ExecutionResult { tx_changesets, block_changesets }) + Ok(post_state) } -} -/// Execute and verify block -pub fn execute_and_verify_receipt( - block: &Block, - total_difficulty: U256, - senders: Option>, - chain_spec: &ChainSpec, - db: &mut SubState, -) -> Result { - let execution_result = execute(block, total_difficulty, senders, chain_spec, db)?; + fn execute_and_verify_receipt( + &mut self, + block: &Block, + total_difficulty: U256, + senders: Option>, + ) -> Result { + let post_state = self.execute(block, total_difficulty, senders)?; + + if self.chain_spec.fork(Hardfork::Byzantium).active_at_block(block.header.number) { + verify_receipt( + block.header.receipts_root, + block.header.logs_bloom, + post_state.receipts().iter(), + )?; + } - let receipts_iter = execution_result.tx_changesets.iter().map(|changeset| &changeset.receipt); + // TODO Before Byzantium, receipts contained state root that would mean that expensive + // operation as hashing that is needed for state root got calculated in every + // transaction This was replaced with is_success flag. + // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 - if chain_spec.fork(Hardfork::Byzantium).active_at_block(block.header.number) { - verify_receipt(block.header.receipts_root, block.header.logs_bloom, receipts_iter)?; + Ok(post_state) } - - // TODO Before Byzantium, receipts contained state root that would mean that expensive operation - // as hashing that is needed for state root got calculated in every transaction - // This was replaced with is_success flag. - // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 - - Ok(execution_result) } /// Verify receipts @@ -474,29 +514,17 @@ pub fn verify_receipt<'a>( Ok(()) } -/// Verify block. 
Execute all transaction and compare results. -/// Returns ChangeSet on transaction granularity. -/// NOTE: If block reward is still active (Before Paris/Merge) we would return -/// additional TransactionStatechangeset for account that receives the reward. -pub fn execute( - block: &Block, - total_difficulty: U256, - senders: Option>, - chain_spec: &ChainSpec, - db: &mut SubState, -) -> Result { - let mut executor = Executor::new(chain_spec, db); - executor.execute(block, total_difficulty, senders) -} - #[cfg(test)] mod tests { use super::*; use reth_primitives::{ - hex_literal::hex, keccak256, Account, Address, Bytes, ChainSpecBuilder, ForkCondition, - StorageKey, H256, MAINNET, U256, + hex_literal::hex, keccak256, Account, Address, BlockNumber, Bytecode, Bytes, + ChainSpecBuilder, ForkCondition, StorageKey, H256, MAINNET, U256, + }; + use reth_provider::{ + post_state::{Change, Storage}, + AccountProvider, BlockHashProvider, StateProvider, }; - use reth_provider::{AccountProvider, BlockHashProvider, StateProvider}; use reth_revm::database::State; use reth_rlp::Decodable; use std::{collections::HashMap, str::FromStr}; @@ -504,8 +532,8 @@ mod tests { #[derive(Debug, Default, Clone, Eq, PartialEq)] struct StateProviderTest { accounts: HashMap, Account)>, - contracts: HashMap, - block_hash: HashMap, + contracts: HashMap, + block_hash: HashMap, } impl StateProviderTest { @@ -520,7 +548,7 @@ mod tests { if let Some(bytecode) = bytecode { let hash = keccak256(&bytecode); account.bytecode_hash = Some(hash); - self.contracts.insert(hash, bytecode); + self.contracts.insert(hash, Bytecode::new_raw(bytecode.into())); } self.accounts.insert(address, (storage, account)); } @@ -534,9 +562,22 @@ mod tests { } impl BlockHashProvider for StateProviderTest { - fn block_hash(&self, number: U256) -> reth_interfaces::Result> { + fn block_hash(&self, number: u64) -> reth_interfaces::Result> { Ok(self.block_hash.get(&number).cloned()) } + + fn canonical_hashes_range( + &self, + start: 
BlockNumber, + end: BlockNumber, + ) -> reth_interfaces::Result> { + let range = start..end; + Ok(self + .block_hash + .iter() + .filter_map(|(block, hash)| range.contains(block).then_some(*hash)) + .collect()) + } } impl StateProvider for StateProviderTest { @@ -551,9 +592,17 @@ mod tests { .and_then(|(storage, _)| storage.get(&storage_key).cloned())) } - fn bytecode_by_hash(&self, code_hash: H256) -> reth_interfaces::Result> { + fn bytecode_by_hash(&self, code_hash: H256) -> reth_interfaces::Result> { Ok(self.contracts.get(&code_hash).cloned()) } + + fn proof( + &self, + _address: Address, + _keys: &[H256], + ) -> reth_interfaces::Result<(Vec, H256, Vec>)> { + todo!() + } } #[test] @@ -602,23 +651,31 @@ mod tests { ); // spec at berlin fork - let chain_spec = ChainSpecBuilder::mainnet().berlin_activated().build(); + let chain_spec = Arc::new(ChainSpecBuilder::mainnet().berlin_activated().build()); - let mut db = SubState::new(State::new(db)); + let db = SubState::new(State::new(db)); // execute chain and verify receipts - let out = - execute_and_verify_receipt(&block, U256::ZERO, None, &chain_spec, &mut db).unwrap(); + let mut executor = Executor::new(chain_spec, db); + let post_state = executor.execute_and_verify_receipt(&block, U256::ZERO, None).unwrap(); - assert_eq!(out.tx_changesets.len(), 1, "Should executed one transaction"); + assert_eq!( + post_state.transitions_count(), + 2, + "Should executed two transitions (1 tx and 1 block reward)" + ); - let changesets = out.tx_changesets[0].clone(); - assert_eq!(changesets.new_bytecodes.len(), 0, "Should have zero new bytecodes"); + let block_reward = U256::from(WEI_2ETH + (WEI_2ETH >> 5)); let account1_info = Account { balance: U256::ZERO, nonce: 0x00, bytecode_hash: None }; let account2_info = Account { - balance: U256::from(0x1bc16d674ece94bau128 - 0x1bc16d674ec80000u128), /* decrease for - * block reward */ + // Block reward decrease + balance: U256::from(0x1bc16d674ece94bau128 - 0x1bc16d674ec80000u128), + 
nonce: 0x00, + bytecode_hash: None, + }; + let account2_info_with_block_reward = Account { + balance: account2_info.balance + block_reward, nonce: 0x00, bytecode_hash: None, }; @@ -627,11 +684,12 @@ mod tests { nonce: 0x01, bytecode_hash: None, }; - - let block_reward = U256::from(WEI_2ETH + (WEI_2ETH >> 5)); + let ommer_beneficiary_info = + Account { nonce: 0, balance: U256::from((8 * WEI_2ETH) >> 3), bytecode_hash: None }; // Check if cache is set // account1 + let db = executor.db(); let cached_acc1 = db.accounts.get(&account1).unwrap(); assert_eq!(cached_acc1.info.balance, account1_info.balance); assert_eq!(cached_acc1.info.nonce, account1_info.nonce); @@ -653,60 +711,88 @@ mod tests { assert_eq!(cached_acc3.account_state, AccountState::Touched); assert_eq!(cached_acc3.storage.len(), 0); - assert_eq!( - changesets.changeset.get(&account1).unwrap().account, - AccountInfoChangeSet::NoChange, - "No change to account" + assert!( + post_state.accounts().get(&account1).is_none(), + "Account should not be present in post-state since it was not changed" ); + + // Check changes + const TX_TRANSITION_ID: u64 = 0; + const BLOCK_TRANSITION_ID: u64 = 1; + + // Clone and sort to make the test deterministic + let mut changes = post_state.changes().to_vec(); + changes.sort_by_key(|change| (change.transition_id(), change.address())); assert_eq!( - changesets.changeset.get(&account2).unwrap().account, - AccountInfoChangeSet::Created { new: account2_info }, - "New account" + changes, + &[ + // Storage changes on account 1 + Change::StorageChanged { + id: TX_TRANSITION_ID, + address: account1, + changeset: [(U256::from(1), (U256::ZERO, U256::from(2)))].into() + }, + // New account + Change::AccountCreated { + id: TX_TRANSITION_ID, + address: account2, + account: account2_info + }, + // Changed account + Change::AccountChanged { + id: TX_TRANSITION_ID, + address: account3, + old: account3_old_info, + new: account3_info + }, + // Block reward + Change::AccountChanged { + id: 
BLOCK_TRANSITION_ID, + address: account2, + old: account2_info, + new: account2_info_with_block_reward + }, + // Ommer reward + Change::AccountCreated { + id: BLOCK_TRANSITION_ID, + address: ommer_beneficiary, + account: ommer_beneficiary_info + }, + ], + "Changeset did not match" ); + + // Check final post-state assert_eq!( - changesets.changeset.get(&account3).unwrap().account, - AccountInfoChangeSet::Changed { old: account3_old_info, new: account3_info }, - "Change to account state" + post_state.storage(), + &BTreeMap::from([( + account1, + Storage { wiped: false, storage: BTreeMap::from([(U256::from(1), U256::from(2))]) } + )]), + "Should have changed 1 storage slot" ); + assert_eq!(post_state.bytecodes().len(), 0, "Should have zero new bytecodes"); - // check block rewards changeset. - let mut block_rewarded_acc_info = account2_info; - // add Blocks 2 eth reward and 2>>5 for one ommer - block_rewarded_acc_info.balance += block_reward; - - // check block reward changeset + let accounts = post_state.accounts(); assert_eq!( - out.block_changesets, - BTreeMap::from([ - ( - account2, - AccountInfoChangeSet::Changed { - new: block_rewarded_acc_info, - old: account2_info - } - ), - ( - ommer_beneficiary, - AccountInfoChangeSet::Created { - new: Account { - nonce: 0, - balance: U256::from((8 * WEI_2ETH) >> 3), - bytecode_hash: None - } - } - ) - ]) + accounts.len(), + 3, + "Should have 4 accounts (account 2, 3 and the ommer beneficiary)" + ); + assert_eq!( + accounts.get(&account2).unwrap(), + &Some(account2_info_with_block_reward), + "Account 2 state is wrong" ); - - assert_eq!(changesets.new_bytecodes.len(), 0, "No new bytecodes"); - - // check storage - let storage = &changesets.changeset.get(&account1).unwrap().storage; - assert_eq!(storage.len(), 1, "Only one storage change"); assert_eq!( - storage.get(&U256::from(1)), - Some(&(U256::ZERO, U256::from(2))), - "Storage change from 0 to 2 on slot 1" + accounts.get(&account3).unwrap(), + &Some(account3_info), + 
"Account 3 state is wrong" + ); + assert_eq!( + accounts.get(&ommer_beneficiary).unwrap(), + &Some(ommer_beneficiary_info), + "Ommer beneficiary state is wrong" ); } @@ -727,25 +813,32 @@ mod tests { beneficiary_balance += i; } - let chain_spec = ChainSpecBuilder::from(&*MAINNET) - .homestead_activated() - .with_fork(Hardfork::Dao, ForkCondition::Block(1)) - .build(); + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .homestead_activated() + .with_fork(Hardfork::Dao, ForkCondition::Block(1)) + .build(), + ); - let mut db = SubState::new(State::new(db)); + let db = SubState::new(State::new(db)); // execute chain and verify receipts - let out = execute_and_verify_receipt( - &Block { header, body: vec![], ommers: vec![], withdrawals: None }, - U256::ZERO, - None, - &chain_spec, - &mut db, - ) - .unwrap(); - assert_eq!(out.tx_changesets.len(), 0, "No tx"); + let mut executor = Executor::new(chain_spec, db); + let out = executor + .execute_and_verify_receipt( + &Block { header, body: vec![], ommers: vec![], withdrawals: None }, + U256::ZERO, + None, + ) + .unwrap(); + assert_eq!( + out.transitions_count(), + 1, + "Should only have 1 transition (the block transition)" + ); // Check if cache is set // beneficiary + let db = executor.db(); let dao_beneficiary = db.accounts.get(&crate::eth_dao_fork::DAO_HARDFORK_BENEFICIARY).unwrap(); @@ -756,24 +849,15 @@ mod tests { } // check changesets - let change_set = - out.block_changesets.get(&crate::eth_dao_fork::DAO_HARDFORK_BENEFICIARY).unwrap(); + let beneficiary_state = + out.accounts().get(&crate::eth_dao_fork::DAO_HARDFORK_BENEFICIARY).unwrap().unwrap(); assert_eq!( - *change_set, - AccountInfoChangeSet::Changed { - new: Account { balance: U256::from(beneficiary_balance), ..Default::default() }, - old: Account { balance: U256::ZERO, ..Default::default() } - } + beneficiary_state, + Account { balance: U256::from(beneficiary_balance), ..Default::default() }, ); - for (i, address) in 
crate::eth_dao_fork::DAO_HARDKFORK_ACCOUNTS.iter().enumerate() { - let change_set = out.block_changesets.get(address).unwrap(); - assert_eq!( - *change_set, - AccountInfoChangeSet::Changed { - new: Account { balance: U256::ZERO, ..Default::default() }, - old: Account { balance: U256::from(i), ..Default::default() } - } - ); + for address in crate::eth_dao_fork::DAO_HARDKFORK_ACCOUNTS.iter() { + let updated_account = out.accounts().get(address).unwrap().unwrap(); + assert_eq!(updated_account, Account { balance: U256::ZERO, ..Default::default() }); } } @@ -820,18 +904,20 @@ mod tests { ); // spec at berlin fork - let chain_spec = ChainSpecBuilder::mainnet().berlin_activated().build(); + let chain_spec = Arc::new(ChainSpecBuilder::mainnet().berlin_activated().build()); - let mut db = SubState::new(State::new(db)); + let db = SubState::new(State::new(db)); // execute chain and verify receipts - let out = - execute_and_verify_receipt(&block, U256::ZERO, None, &chain_spec, &mut db).unwrap(); - - assert_eq!(out.tx_changesets.len(), 1, "Should executed one transaction"); + let mut executor = Executor::new(chain_spec, db); + let out = executor.execute_and_verify_receipt(&block, U256::ZERO, None).unwrap(); - let changesets = out.tx_changesets[0].clone(); - assert_eq!(changesets.new_bytecodes.len(), 0, "Should have zero new bytecodes"); + assert_eq!( + out.transitions_count(), + 2, + "Should only have two transitions (the transaction and the block)" + ); + assert_eq!(out.bytecodes().len(), 0, "Should have zero new bytecodes"); let post_account_caller = Account { balance: U256::from(0x0de0b6b3a761cf60u64), @@ -840,21 +926,20 @@ mod tests { }; assert_eq!( - changesets.changeset.get(&address_caller).unwrap().account, - AccountInfoChangeSet::Changed { new: post_account_caller, old: pre_account_caller }, + out.accounts().get(&address_caller).unwrap().unwrap(), + post_account_caller, "Caller account has changed and fee is deduced" ); - let selfdestroyer_changeset = 
changesets.changeset.get(&address_selfdestruct).unwrap(); - - // check account assert_eq!( - selfdestroyer_changeset.account, - AccountInfoChangeSet::Destroyed { old: pre_account_selfdestroyed }, - "Selfdestroyed account" + out.accounts().get(&address_selfdestruct).unwrap(), + &None, + "Selfdestructed account should have been deleted" + ); + assert!( + out.storage().get(&address_selfdestruct).unwrap().wiped, + "Selfdestructed account should have its storage wiped" ); - - assert!(selfdestroyer_changeset.wipe_storage); } // Test vector from https://github.com/ethereum/tests/blob/3156db5389921125bb9e04142d18e0e7b0cf8d64/BlockchainTests/EIPTests/bc4895-withdrawals/twoIdenticalIndexDifferentValidator.json @@ -869,45 +954,99 @@ mod tests { Address::from_str("c94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap(); // spec at shanghai fork - let chain_spec = ChainSpecBuilder::mainnet().shanghai_activated().build(); + let chain_spec = Arc::new(ChainSpecBuilder::mainnet().shanghai_activated().build()); - let mut db = SubState::new(State::new(StateProviderTest::default())); + let db = SubState::new(State::new(StateProviderTest::default())); // execute chain and verify receipts - let out = - execute_and_verify_receipt(&block, U256::ZERO, None, &chain_spec, &mut db).unwrap(); - assert_eq!(out.tx_changesets.len(), 0, "No tx"); + let mut executor = Executor::new(chain_spec, db); + let out = executor.execute_and_verify_receipt(&block, U256::ZERO, None).unwrap(); + assert_eq!(out.transitions_count(), 1, "Only one transition (the block transition)"); let withdrawal_sum = withdrawals.iter().fold(U256::ZERO, |sum, w| sum + w.amount_wei()); - let beneficiary_account = db.accounts.get(&withdrawal_beneficiary).unwrap(); + let beneficiary_account = executor.db().accounts.get(&withdrawal_beneficiary).unwrap(); assert_eq!(beneficiary_account.info.balance, withdrawal_sum); assert_eq!(beneficiary_account.info.nonce, 0); assert_eq!(beneficiary_account.account_state, AccountState::StorageCleared); 
- assert_eq!(out.block_changesets.len(), 1); assert_eq!( - out.block_changesets.get(&withdrawal_beneficiary), - Some(&AccountInfoChangeSet::Created { - new: Account { nonce: 0, balance: withdrawal_sum, bytecode_hash: None }, - }) + out.accounts().get(&withdrawal_beneficiary).unwrap(), + &Some(Account { nonce: 0, balance: withdrawal_sum, bytecode_hash: None }), + "Withdrawal account should have gotten its balance set" ); // Execute same block again - let out = - execute_and_verify_receipt(&block, U256::ZERO, None, &chain_spec, &mut db).unwrap(); - assert_eq!(out.tx_changesets.len(), 0, "No tx"); + let out = executor.execute_and_verify_receipt(&block, U256::ZERO, None).unwrap(); + assert_eq!( + out.transitions_count(), + 1, + "Should only have one transition (the block transition)" + ); - assert_eq!(out.block_changesets.len(), 1); assert_eq!( - out.block_changesets.get(&withdrawal_beneficiary), - Some(&AccountInfoChangeSet::Changed { - old: Account { nonce: 0, balance: withdrawal_sum, bytecode_hash: None }, - new: Account { - nonce: 0, - balance: withdrawal_sum + withdrawal_sum, - bytecode_hash: None - }, - }) + out.accounts().get(&withdrawal_beneficiary).unwrap(), + &Some(Account { + nonce: 0, + balance: withdrawal_sum + withdrawal_sum, + bytecode_hash: None + }), + "Withdrawal account should have gotten its balance set" + ); + } + + #[test] + fn test_account_state_preserved() { + let account = Address::from_str("c94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap(); + + let mut db = StateProviderTest::default(); + db.insert_account(account, Account::default(), None, HashMap::default()); + + let chain_spec = Arc::new(ChainSpecBuilder::mainnet().istanbul_activated().build()); + let db = SubState::new(State::new(db)); + + let default_acc = RevmAccount { + info: AccountInfo::default(), + storage: hash_map::HashMap::default(), + is_destroyed: false, + is_touched: false, + storage_cleared: false, + is_not_existing: false, + }; + let mut executor = Executor::new(chain_spec, 
db); + // touch account + executor.commit_changes( + hash_map::HashMap::from([(account, RevmAccount { ..default_acc.clone() })]), + true, + &mut PostState::default(), + ); + // destroy account + executor.commit_changes( + hash_map::HashMap::from([( + account, + RevmAccount { is_destroyed: true, is_touched: true, ..default_acc.clone() }, + )]), + true, + &mut PostState::default(), ); + // re-create account + executor.commit_changes( + hash_map::HashMap::from([( + account, + RevmAccount { is_touched: true, storage_cleared: true, ..default_acc.clone() }, + )]), + true, + &mut PostState::default(), + ); + // touch account + executor.commit_changes( + hash_map::HashMap::from([(account, RevmAccount { ..default_acc })]), + true, + &mut PostState::default(), + ); + + let db = executor.db(); + + let account = db.load_account(account).unwrap(); + assert_eq!(account.account_state, AccountState::StorageCleared); } } diff --git a/crates/executor/src/factory.rs b/crates/executor/src/factory.rs new file mode 100644 index 00000000000..5bd59d0297d --- /dev/null +++ b/crates/executor/src/factory.rs @@ -0,0 +1,34 @@ +use reth_primitives::ChainSpec; +use reth_provider::{ExecutorFactory, StateProvider}; +use reth_revm::database::{State, SubState}; + +use crate::executor::Executor; +use std::sync::Arc; + +/// Factory that spawn Executor. 
+#[derive(Clone, Debug)] +pub struct Factory { + chain_spec: Arc, +} + +impl Factory { + /// Create new factory + pub fn new(chain_spec: Arc) -> Self { + Self { chain_spec } + } +} + +impl ExecutorFactory for Factory { + type Executor = Executor; + + /// Executor with [`StateProvider`] + fn with_sp(&self, sp: SP) -> Self::Executor { + let substate = SubState::new(State::new(sp)); + Executor::new(self.chain_spec.clone(), substate) + } + + /// Return internal chainspec + fn chain_spec(&self) -> &ChainSpec { + self.chain_spec.as_ref() + } +} diff --git a/crates/executor/src/lib.rs b/crates/executor/src/lib.rs index 523fc7a0f0d..fbdb1cc1736 100644 --- a/crates/executor/src/lib.rs +++ b/crates/executor/src/lib.rs @@ -8,8 +8,20 @@ //! Reth executor executes transaction in block of data. pub mod eth_dao_fork; +pub mod substate; + +/// Execution result types. +pub use reth_provider::post_state; + +pub mod blockchain_tree; -/// Execution result types -pub mod execution_result; /// Executor pub mod executor; + +/// ExecutorFactory impl +pub mod factory; +pub use factory::Factory; + +#[cfg(any(test, feature = "test-utils"))] +/// Common test helpers for mocking out executor and executor factory +pub mod test_utils; diff --git a/crates/executor/src/substate.rs b/crates/executor/src/substate.rs new file mode 100644 index 00000000000..082ce140b6b --- /dev/null +++ b/crates/executor/src/substate.rs @@ -0,0 +1,101 @@ +//! Substate for blockchain trees + +use reth_interfaces::{provider::ProviderError, Result}; +use reth_primitives::{Account, Address, BlockHash, BlockNumber, Bytecode, Bytes, H256, U256}; +use reth_provider::{post_state::PostState, AccountProvider, BlockHashProvider, StateProvider}; +use std::collections::BTreeMap; + +/// A state provider that either resolves to data in a wrapped [`PostState`], or an underlying state +/// provider. +pub struct PostStateProvider<'a, SP: StateProvider> { + /// The wrapped state after execution of one or more transactions and/or blocks. 
+ state: &'a PostState, + /// The inner state provider. + provider: SP, + /// The blocks in the sidechain. + sidechain_block_hashes: &'a BTreeMap, + /// The blocks in the canonical chain. + canonical_block_hashes: &'a BTreeMap, +} + +impl<'a, SP: StateProvider> PostStateProvider<'a, SP> { + /// Create new post-state provider + pub fn new( + state: &'a PostState, + provider: SP, + sidechain_block_hashes: &'a BTreeMap, + canonical_block_hashes: &'a BTreeMap, + ) -> Self { + Self { state, provider, sidechain_block_hashes, canonical_block_hashes } + } +} + +/* Implement StateProvider traits */ + +impl<'a, SP: StateProvider> BlockHashProvider for PostStateProvider<'a, SP> { + fn block_hash(&self, block_number: BlockNumber) -> Result> { + if let Some(sidechain_block_hash) = self.sidechain_block_hashes.get(&block_number).cloned() + { + return Ok(Some(sidechain_block_hash)) + } + + Ok(Some( + self.canonical_block_hashes + .get(&block_number) + .cloned() + .ok_or(ProviderError::BlockchainTreeBlockHash { block_number })?, + )) + } + + fn canonical_hashes_range(&self, _start: BlockNumber, _end: BlockNumber) -> Result> { + unimplemented!() + } +} + +impl<'a, SP: StateProvider> AccountProvider for PostStateProvider<'a, SP> { + fn basic_account(&self, address: Address) -> Result> { + if let Some(account) = self.state.account(&address) { + Ok(*account) + } else { + self.provider.basic_account(address) + } + } +} + +impl<'a, SP: StateProvider> StateProvider for PostStateProvider<'a, SP> { + fn storage( + &self, + account: Address, + storage_key: reth_primitives::StorageKey, + ) -> Result> { + if let Some(storage) = self.state.account_storage(&account) { + if storage.wiped { + return Ok(Some(U256::ZERO)) + } + + if let Some(value) = + storage.storage.get(&U256::from_be_bytes(storage_key.to_fixed_bytes())) + { + return Ok(Some(*value)) + } + } + + self.provider.storage(account, storage_key) + } + + fn bytecode_by_hash(&self, code_hash: H256) -> Result> { + if let Some(bytecode) = 
self.state.bytecode(&code_hash).cloned() { + return Ok(Some(bytecode)) + } + + self.provider.bytecode_by_hash(code_hash) + } + + fn proof( + &self, + _address: Address, + _keys: &[H256], + ) -> Result<(Vec, H256, Vec>)> { + Err(ProviderError::HistoryStateRoot.into()) + } +} diff --git a/crates/executor/src/test_utils/executor.rs b/crates/executor/src/test_utils/executor.rs new file mode 100644 index 00000000000..d09338fd5eb --- /dev/null +++ b/crates/executor/src/test_utils/executor.rs @@ -0,0 +1,26 @@ +use reth_interfaces::executor::Error as ExecutionError; +use reth_primitives::{Address, Block, U256}; +use reth_provider::{post_state::PostState, BlockExecutor, StateProvider}; + +/// Test executor with mocked result. +pub struct TestExecutor(pub Option); + +impl BlockExecutor for TestExecutor { + fn execute( + &mut self, + _block: &Block, + _total_difficulty: U256, + _senders: Option>, + ) -> Result { + self.0.clone().ok_or(ExecutionError::VerificationFailed) + } + + fn execute_and_verify_receipt( + &mut self, + _block: &Block, + _total_difficulty: U256, + _senders: Option>, + ) -> Result { + self.0.clone().ok_or(ExecutionError::VerificationFailed) + } +} diff --git a/crates/executor/src/test_utils/factory.rs b/crates/executor/src/test_utils/factory.rs new file mode 100644 index 00000000000..50e4360ad17 --- /dev/null +++ b/crates/executor/src/test_utils/factory.rs @@ -0,0 +1,37 @@ +use super::TestExecutor; +use parking_lot::Mutex; +use reth_primitives::ChainSpec; +use reth_provider::{post_state::PostState, ExecutorFactory, StateProvider}; +use std::sync::Arc; + +/// Executor factory with pre-set execution results. +#[derive(Clone, Debug)] +pub struct TestExecutorFactory { + exec_results: Arc>>, + chain_spec: Arc, +} + +impl TestExecutorFactory { + /// Create new instance of test factory. 
+ pub fn new(chain_spec: Arc) -> Self { + Self { exec_results: Arc::new(Mutex::new(Vec::new())), chain_spec } + } + + /// Extend the mocked execution results + pub fn extend(&self, results: Vec) { + self.exec_results.lock().extend(results.into_iter()); + } +} + +impl ExecutorFactory for TestExecutorFactory { + type Executor = TestExecutor; + + fn with_sp(&self, _sp: SP) -> Self::Executor { + let exec_res = self.exec_results.lock().pop(); + TestExecutor(exec_res) + } + + fn chain_spec(&self) -> &ChainSpec { + self.chain_spec.as_ref() + } +} diff --git a/crates/executor/src/test_utils/mod.rs b/crates/executor/src/test_utils/mod.rs new file mode 100644 index 00000000000..86ef2f53aea --- /dev/null +++ b/crates/executor/src/test_utils/mod.rs @@ -0,0 +1,5 @@ +mod executor; +pub use executor::*; + +mod factory; +pub use factory::*; diff --git a/crates/interfaces/Cargo.toml b/crates/interfaces/Cargo.toml index aeb1b909c5f..d8a81d11542 100644 --- a/crates/interfaces/Cargo.toml +++ b/crates/interfaces/Cargo.toml @@ -10,7 +10,7 @@ readme = "README.md" reth-codecs = { path = "../storage/codecs" } reth-primitives = { path = "../primitives" } reth-rpc-types = { path = "../rpc/rpc-types" } -reth-network-api = { path = "../net/network-api"} +reth-network-api = { path = "../net/network-api" } revm-primitives = "1.0" async-trait = "0.1.57" thiserror = "1.0.37" @@ -22,20 +22,28 @@ reth-eth-wire = { path = "../net/eth-wire" } # codecs parity-scale-codec = { version = "3.2.1", features = ["bytes"] } -futures = "0.3.25" +futures = "0.3" tokio-stream = "0.1.11" rand = "0.8.5" arbitrary = { version = "1.1.7", features = ["derive"], optional = true } -secp256k1 = { version = "0.24.2", default-features = false, features = ["alloc", "recovery", "rand"], optional = true } +secp256k1 = { version = "0.26.0", default-features = false, features = [ + "alloc", + "recovery", + "rand", +], optional = true } modular-bitfield = "0.11.2" [dev-dependencies] reth-db = { path = "../storage/db", features = 
["test-utils"] } tokio = { version = "1.21.2", features = ["full"] } tokio-stream = { version = "0.1.11", features = ["sync"] } -arbitrary = { version = "1.1.7", features = ["derive"]} +arbitrary = { version = "1.1.7", features = ["derive"] } hex-literal = "0.3" -secp256k1 = { version = "0.24.2", default-features = false, features = ["alloc", "recovery", "rand"] } +secp256k1 = { version = "0.26.0", default-features = false, features = [ + "alloc", + "recovery", + "rand", +] } [features] bench = [] diff --git a/crates/interfaces/src/consensus.rs b/crates/interfaces/src/consensus.rs index c940733b64b..5f232b67f30 100644 --- a/crates/interfaces/src/consensus.rs +++ b/crates/interfaces/src/consensus.rs @@ -1,7 +1,8 @@ use async_trait::async_trait; -use reth_primitives::{BlockHash, BlockNumber, SealedBlock, SealedHeader, H256, U256}; +use reth_primitives::{ + BlockHash, BlockNumber, InvalidTransactionError, SealedBlock, SealedHeader, H256, U256, +}; use std::fmt::Debug; -use tokio::sync::watch::Receiver; /// Re-export fork choice state pub use reth_rpc_types::engine::ForkchoiceState; @@ -10,9 +11,6 @@ pub use reth_rpc_types::engine::ForkchoiceState; #[async_trait] #[auto_impl::auto_impl(&, Arc)] pub trait Consensus: Debug + Send + Sync { - /// Get a receiver for the fork choice state - fn fork_choice_state(&self) -> Receiver; - /// Validate if the header is correct and follows consensus specification. /// /// This is called before properties that are not in the header itself (like total difficulty) @@ -23,13 +21,17 @@ pub trait Consensus: Debug + Send + Sync { &self, header: &SealedHeader, parent: &SealedHeader, - ) -> Result<(), Error>; + ) -> Result<(), ConsensusError>; /// Validate if the header is correct and follows the consensus specification, including /// computed properties (like total difficulty). /// /// Some consensus engines may want to do additional checks here. 
- fn validate_header(&self, header: &SealedHeader, total_difficulty: U256) -> Result<(), Error>; + fn validate_header( + &self, + header: &SealedHeader, + total_difficulty: U256, + ) -> Result<(), ConsensusError>; /// Validate a block disregarding world state, i.e. things that can be checked before sender /// recovery and execution. @@ -38,7 +40,7 @@ pub trait Consensus: Debug + Send + Sync { /// 11.1 "Ommer Validation". /// /// **This should not be called for the genesis block**. - fn pre_validate_block(&self, block: &SealedBlock) -> Result<(), Error>; + fn pre_validate_block(&self, block: &SealedBlock) -> Result<(), ConsensusError>; /// After the Merge (aka Paris) block rewards became obsolete. /// @@ -51,7 +53,7 @@ pub trait Consensus: Debug + Send + Sync { /// Consensus Errors #[allow(missing_docs)] #[derive(thiserror::Error, Debug, PartialEq, Eq, Clone)] -pub enum Error { +pub enum ConsensusError { #[error("Block used gas ({gas_used:?}) is greater than gas limit ({gas_limit:?}).")] HeaderGasUsedExceedsGasLimit { gas_used: u64, gas_limit: u64 }, #[error("Block ommer hash ({got:?}) is different from expected: ({expected:?})")] @@ -71,7 +73,7 @@ pub enum Error { #[error("Block number {block_number:?} is mismatch with parent block number {parent_block_number:?}")] ParentBlockNumberMismatch { parent_block_number: BlockNumber, block_number: BlockNumber }, #[error( - "Block timestamp {timestamp:?} is in past in comparison with parent timestamp {parent_timestamp:?}." + "Block timestamp {timestamp:?} is in past in comparison with parent timestamp {parent_timestamp:?}." 
)] TimestampIsInPast { parent_timestamp: u64, timestamp: u64 }, #[error("Block timestamp {timestamp:?} is in future in comparison of our clock time {present_timestamp:?}.")] @@ -84,26 +86,6 @@ pub enum Error { BaseFeeMissing, #[error("Block base fee ({got:?}) is different then expected: ({expected:?}).")] BaseFeeDiff { expected: u64, got: u64 }, - #[error("Transaction eip1559 priority fee is more then max fee.")] - TransactionPriorityFeeMoreThenMaxFee, - #[error("Transaction chain_id does not match.")] - TransactionChainId, - #[error("Transaction max fee is less them block base fee.")] - TransactionMaxFeeLessThenBaseFee, - #[error("Transaction signer does not have account.")] - SignerAccountNotExisting, - #[error("Transaction signer has bytecode set.")] - SignerAccountHasBytecode, - #[error("Transaction nonce is not consistent.")] - TransactionNonceNotConsistent, - #[error("Account does not have enough funds ({available_funds:?}) to cover transaction max fee: {max_fee:?}.")] - InsufficientFunds { max_fee: u128, available_funds: u128 }, - #[error("Eip2930 transaction is enabled after berlin hardfork.")] - TransactionEip2930Disabled, - #[error("Old legacy transaction before Spurious Dragon should not have chain_id.")] - TransactionOldLegacyChainId, - #[error("Eip2930 transaction is enabled after london hardfork.")] - TransactionEip1559Disabled, #[error("Transaction signer recovery error.")] TransactionSignerRecoveryError, #[error( @@ -130,4 +112,7 @@ pub enum Error { WithdrawalIndexInvalid { got: u64, expected: u64 }, #[error("Missing withdrawals")] BodyWithdrawalsMissing, + /// Error for a transaction that violates consensus. 
+ #[error(transparent)] + InvalidTransaction(#[from] InvalidTransactionError), } diff --git a/crates/interfaces/src/error.rs b/crates/interfaces/src/error.rs index 6b27e906bba..746dffcb559 100644 --- a/crates/interfaces/src/error.rs +++ b/crates/interfaces/src/error.rs @@ -9,7 +9,7 @@ pub enum Error { Execution(#[from] crate::executor::Error), #[error(transparent)] - Consensus(#[from] crate::consensus::Error), + Consensus(#[from] crate::consensus::ConsensusError), #[error(transparent)] Database(#[from] crate::db::Error), diff --git a/crates/interfaces/src/executor.rs b/crates/interfaces/src/executor.rs index 489c00810e8..a40f60a0e25 100644 --- a/crates/interfaces/src/executor.rs +++ b/crates/interfaces/src/executor.rs @@ -1,32 +1,12 @@ -use async_trait::async_trait; -use reth_primitives::{Address, Block, Bloom, H256, U256}; +use reth_primitives::{BlockHash, BlockNumber, Bloom, H256}; use thiserror::Error; -/// An executor capable of executing a block. -#[async_trait] -pub trait BlockExecutor { - /// Execute a block. - /// - /// The number of `senders` should be equal to the number of transactions in the block. - /// - /// If no senders are specified, the `execute` function MUST recover the senders for the - /// provided block's transactions internally. We use this to allow for calculating senders in - /// parallel in e.g. staged sync, so that execution can happen without paying for sender - /// recovery costs. 
- fn execute( - &mut self, - block: &Block, - total_difficulty: U256, - senders: Option>, - ) -> Result; -} - /// BlockExecutor Errors #[allow(missing_docs)] #[derive(Error, Debug, Clone, PartialEq, Eq)] pub enum Error { - #[error("EVM reported invalid transaction:{0}")] - EVM(String), + #[error("EVM reported invalid transaction ({hash:?}): {message}")] + EVM { hash: H256, message: String }, #[error("Example of error.")] VerificationFailed, #[error("Fatal internal error")] @@ -54,4 +34,34 @@ pub enum Error { BlockGasUsed { got: u64, expected: u64 }, #[error("Provider error")] ProviderError, + #[error("BlockChainId can't be found in BlockchainTree with internal index {chain_id}")] + BlockChainIdConsistency { chain_id: u64 }, + #[error( + "Appending chain on fork {other_chain_fork:?} is not possible as the tip is {chain_tip:?}" + )] + AppendChainDoesntConnect { chain_tip: (u64, H256), other_chain_fork: (u64, H256) }, + #[error("Canonical chain header #{block_hash} can't be found ")] + CanonicalChain { block_hash: BlockHash }, + #[error("Can't insert #{block_number} {block_hash} as last finalized block number is {last_finalized}")] + PendingBlockIsFinalized { + block_hash: BlockHash, + block_number: BlockNumber, + last_finalized: BlockNumber, + }, + #[error("Can't insert block #{block_number} {block_hash} too far in the future, as last finalized block number is {last_finalized}")] + PendingBlockIsInFuture { + block_hash: BlockHash, + block_number: BlockNumber, + last_finalized: BlockNumber, + }, + #[error("Block number #{block_number} not found in blockchain tree chain")] + BlockNumberNotFoundInChain { block_number: BlockNumber }, + #[error("Block hash {block_hash} not found in blockchain tree chain")] + BlockHashNotFoundInChain { block_hash: BlockHash }, + #[error("Transaction error on revert: {inner:?}")] + CanonicalRevert { inner: String }, + #[error("Transaction error on commit: {inner:?}")] + CanonicalCommit { inner: String }, + #[error("Transaction error on pipeline 
status update: {inner:?}")] + PipelineStatusUpdate { inner: String }, } diff --git a/crates/interfaces/src/p2p/error.rs b/crates/interfaces/src/p2p/error.rs index 92922e40b00..70e22f6cd01 100644 --- a/crates/interfaces/src/p2p/error.rs +++ b/crates/interfaces/src/p2p/error.rs @@ -125,7 +125,7 @@ pub enum DownloadError { hash: H256, /// The details of validation failure #[source] - error: consensus::Error, + error: consensus::ConsensusError, }, /// Error when checking that the current [`Header`] has the parent's hash as the parent_hash /// field, and that they have sequential block numbers. @@ -148,6 +148,14 @@ pub enum DownloadError { /// The hash of the expected tip expected: H256, }, + /// Received a tip with an invalid tip number + #[error("Received invalid tip number: {received:?}. Expected {expected:?}.")] + InvalidTipNumber { + /// The block number of the received tip + received: u64, + /// The block number of the expected tip + expected: u64, + }, /// Received a response to a request with unexpected start block #[error("Headers response starts at unexpected block: {received:?}. Expected {expected:?}.")] HeadersResponseStartBlockMismatch { @@ -172,7 +180,7 @@ pub enum DownloadError { hash: H256, /// The details of validation failure #[source] - error: consensus::Error, + error: consensus::ConsensusError, }, /// Received more bodies than requested. #[error("Received more bodies than requested. Expected: {expected}. Received: {received}")] diff --git a/crates/interfaces/src/p2p/headers/downloader.rs b/crates/interfaces/src/p2p/headers/downloader.rs index 7b129e29d80..ac10d2be79e 100644 --- a/crates/interfaces/src/p2p/headers/downloader.rs +++ b/crates/interfaces/src/p2p/headers/downloader.rs @@ -3,7 +3,7 @@ use crate::{ p2p::error::{DownloadError, DownloadResult}, }; use futures::Stream; -use reth_primitives::{SealedHeader, H256}; +use reth_primitives::{BlockHashOrNumber, SealedHeader, H256}; /// A downloader capable of fetching and yielding block headers. 
/// @@ -48,6 +48,8 @@ pub enum SyncTarget { /// The benefit of this variant is, that this already provides the block number of the highest /// missing block. Gap(SealedHeader), + /// This represents a tip by block number + TipNum(u64), } // === impl SyncTarget === @@ -57,10 +59,11 @@ impl SyncTarget { /// /// This returns the hash if the target is [SyncTarget::Tip] or the `parent_hash` of the given /// header in [SyncTarget::Gap] - pub fn tip(&self) -> H256 { + pub fn tip(&self) -> BlockHashOrNumber { match self { - SyncTarget::Tip(tip) => *tip, - SyncTarget::Gap(gap) => gap.parent_hash, + SyncTarget::Tip(tip) => (*tip).into(), + SyncTarget::Gap(gap) => gap.parent_hash.into(), + SyncTarget::TipNum(num) => (*num).into(), } } } diff --git a/crates/interfaces/src/provider.rs b/crates/interfaces/src/provider.rs index be00f651271..33ace26d1ea 100644 --- a/crates/interfaces/src/provider.rs +++ b/crates/interfaces/src/provider.rs @@ -68,7 +68,24 @@ pub enum ProviderError { /// Reached the end of the transaction sender table. #[error("Got to the end of the transaction sender table")] EndOfTransactionSenderTable, + /// Missing block hash in BlockchainTree + #[error("Missing block hash for block #{block_number:?} in blockchain tree")] + BlockchainTreeBlockHash { block_number: BlockNumber }, + /// Some error occurred while interacting with the state tree. + #[error("Unknown error occurred while interacting with the state trie.")] + StateTrie, + #[error("History state root, can't be calculated")] + HistoryStateRoot, /// Thrown when required header related data was not found but was required. 
#[error("requested data not found")] HeaderNotFound, + /// Mismatch of sender and transaction + #[error("Mismatch of sender and transaction id {tx_id}")] + MismatchOfTransactionAndSenderId { tx_id: TxNumber }, + /// Block body wrong transaction count + #[error("Stored block indices does not match transaction count")] + BlockBodyTransactionCount, + /// Thrown when the cache service task dropped + #[error("cache service task stopped")] + CacheServiceUnavailable, } diff --git a/crates/interfaces/src/test_utils/generators.rs b/crates/interfaces/src/test_utils/generators.rs index 4d0b7fa07f4..8d8e3bd4802 100644 --- a/crates/interfaces/src/test_utils/generators.rs +++ b/crates/interfaces/src/test_utils/generators.rs @@ -1,9 +1,10 @@ -use rand::{distributions::uniform::SampleRange, thread_rng, Rng}; +use rand::{distributions::uniform::SampleRange, seq::SliceRandom, thread_rng, Rng}; use reth_primitives::{ - proofs, Account, Address, Bytes, Header, SealedBlock, SealedHeader, Signature, Transaction, - TransactionKind, TransactionSigned, TxLegacy, H160, H256, U256, + proofs, sign_message, Account, Address, Bytes, Header, SealedBlock, SealedHeader, Signature, + StorageEntry, Transaction, TransactionKind, TransactionSigned, TxLegacy, H160, H256, U256, }; -use secp256k1::{KeyPair, Message as SecpMessage, Secp256k1, SecretKey}; +use secp256k1::{KeyPair, Message as SecpMessage, Secp256k1, SecretKey, SECP256K1}; +use std::{collections::BTreeMap, ops::Sub}; // TODO(onbjerg): Maybe we should split this off to its own crate, or move the helpers to the // relevant crates? @@ -71,21 +72,6 @@ pub fn random_signed_tx() -> TransactionSigned { TransactionSigned::from_transaction_and_signature(tx, signature) } -/// Signs message with the given secret key. -/// Returns the corresponding signature. 
-pub fn sign_message(secret: H256, message: H256) -> Result { - let secp = Secp256k1::new(); - let sec = SecretKey::from_slice(secret.as_ref())?; - let s = secp.sign_ecdsa_recoverable(&SecpMessage::from_slice(&message[..])?, &sec); - let (rec_id, data) = s.serialize_compact(); - - Ok(Signature { - r: U256::try_from_be_slice(&data[..32]).unwrap(), - s: U256::try_from_be_slice(&data[32..64]).unwrap(), - odd_y_parity: rec_id.to_i32() != 0, - }) -} - /// Generate a random block filled with signed transactions (generated using /// [random_signed_tx]). If no transaction count is provided, the number of transactions /// will be random, otherwise the provided count will be used. @@ -165,6 +151,115 @@ pub fn random_block_range( blocks } +type Transition = Vec<(Address, Account, Vec)>; +type AccountState = (Account, Vec); + +/// Generate a range of transitions for given blocks and accounts. +/// Assumes all accounts start with an empty storage. +/// +/// Returns a Vec of account and storage changes for each transition, +/// along with the final state of all accounts and storages. 
+pub fn random_transition_range<'a, IBlk, IAcc>( + blocks: IBlk, + accounts: IAcc, + n_changes: std::ops::Range, + key_range: std::ops::Range, +) -> (Vec, BTreeMap) +where + IBlk: IntoIterator, + IAcc: IntoIterator))>, +{ + let mut rng = rand::thread_rng(); + let mut state: BTreeMap<_, _> = accounts + .into_iter() + .map(|(addr, (acc, st))| (addr, (acc, st.into_iter().map(|e| (e.key, e.value)).collect()))) + .collect(); + + let valid_addresses = state.keys().copied().collect(); + + let num_transitions: usize = blocks.into_iter().map(|block| block.body.len()).sum(); + let mut transitions = Vec::with_capacity(num_transitions); + + (0..num_transitions).for_each(|i| { + let mut transition = Vec::new(); + let (from, to, mut transfer, new_entries) = + random_account_change(&valid_addresses, n_changes.clone(), key_range.clone()); + + // extract from sending account + let (prev_from, _) = state.get_mut(&from).unwrap(); + transition.push((from, *prev_from, Vec::new())); + + transfer = transfer.min(prev_from.balance).max(U256::from(1)); + prev_from.balance = prev_from.balance.wrapping_sub(transfer); + + // deposit in receiving account and update storage + let (prev_to, storage): &mut (Account, BTreeMap) = state.get_mut(&to).unwrap(); + + let old_entries = new_entries + .into_iter() + .filter_map(|entry| { + let old = if entry.value != U256::ZERO { + storage.insert(entry.key, entry.value) + } else { + let old = storage.remove(&entry.key); + if matches!(old, Some(U256::ZERO)) { + return None + } + old + }; + Some(StorageEntry { value: old.unwrap_or(U256::from(0)), ..entry }) + }) + .collect(); + + transition.push((to, *prev_to, old_entries)); + + prev_to.balance = prev_to.balance.wrapping_add(transfer); + + transitions.push(transition); + }); + + let final_state = state + .into_iter() + .map(|(addr, (acc, storage))| { + (addr, (acc, storage.into_iter().map(|v| v.into()).collect())) + }) + .collect(); + (transitions, final_state) +} + +/// Generate a random account change. 
+/// +/// Returns two addresses, a balance_change, and a Vec of new storage entries. +pub fn random_account_change( + valid_addresses: &Vec
, + n_changes: std::ops::Range, + key_range: std::ops::Range, +) -> (Address, Address, U256, Vec) { + let mut rng = rand::thread_rng(); + let mut addresses = valid_addresses.choose_multiple(&mut rng, 2).cloned(); + + let addr_from = addresses.next().unwrap_or_else(Address::random); + let addr_to = addresses.next().unwrap_or_else(Address::random); + + let balance_change = U256::from(rng.gen::()); + + let storage_changes = (0..n_changes.sample_single(&mut rng)) + .map(|_| random_storage_entry(key_range.clone())) + .collect(); + + (addr_from, addr_to, balance_change, storage_changes) +} + +/// Generate a random storage change. +pub fn random_storage_entry(key_range: std::ops::Range) -> StorageEntry { + let mut rng = rand::thread_rng(); + + let key = H256::from_low_u64_be(key_range.sample_single(&mut rng)); + let value = U256::from(rng.gen::()); + + StorageEntry { key, value } +} + /// Generate random Externaly Owned Account (EOA account without contract). pub fn random_eoa_account() -> (Address, Account) { let nonce: u64 = rand::random(); diff --git a/crates/interfaces/src/test_utils/headers.rs b/crates/interfaces/src/test_utils/headers.rs index eaa8f804de3..ccb8bcd59b7 100644 --- a/crates/interfaces/src/test_utils/headers.rs +++ b/crates/interfaces/src/test_utils/headers.rs @@ -1,6 +1,6 @@ //! Testing support for headers related interfaces. 
use crate::{ - consensus::{self, Consensus, Error}, + consensus::{self, Consensus, ConsensusError}, p2p::{ download::DownloadClient, error::{DownloadError, DownloadResult, PeerRequestResult, RequestError}, @@ -259,22 +259,13 @@ impl HeadersClient for TestHeadersClient { /// Consensus engine implementation for testing #[derive(Debug)] pub struct TestConsensus { - /// Watcher over the forkchoice state - channel: (watch::Sender, watch::Receiver), /// Flag whether the header validation should purposefully fail fail_validation: AtomicBool, } impl Default for TestConsensus { fn default() -> Self { - Self { - channel: watch::channel(ForkchoiceState { - head_block_hash: H256::zero(), - finalized_block_hash: H256::zero(), - safe_block_hash: H256::zero(), - }), - fail_validation: AtomicBool::new(false), - } + Self { fail_validation: AtomicBool::new(false) } } } @@ -288,14 +279,6 @@ impl TestConsensus { pub fn set_fail_validation(&self, val: bool) { self.fail_validation.store(val, Ordering::SeqCst) } - - /// Update the forkchoice state. - pub fn notify_fork_choice_state( - &self, - state: ForkchoiceState, - ) -> Result<(), SendError> { - self.channel.0.send(state) - } } /// Status updater for testing. 
@@ -323,33 +306,33 @@ impl StatusUpdater for TestStatusUpdater { #[async_trait::async_trait] impl Consensus for TestConsensus { - fn fork_choice_state(&self) -> watch::Receiver { - self.channel.1.clone() - } - fn pre_validate_header( &self, header: &SealedHeader, parent: &SealedHeader, - ) -> Result<(), Error> { + ) -> Result<(), ConsensusError> { if self.fail_validation() { - Err(consensus::Error::BaseFeeMissing) + Err(consensus::ConsensusError::BaseFeeMissing) } else { Ok(()) } } - fn validate_header(&self, header: &SealedHeader, total_difficulty: U256) -> Result<(), Error> { + fn validate_header( + &self, + header: &SealedHeader, + total_difficulty: U256, + ) -> Result<(), ConsensusError> { if self.fail_validation() { - Err(consensus::Error::BaseFeeMissing) + Err(consensus::ConsensusError::BaseFeeMissing) } else { Ok(()) } } - fn pre_validate_block(&self, _block: &SealedBlock) -> Result<(), consensus::Error> { + fn pre_validate_block(&self, _block: &SealedBlock) -> Result<(), consensus::ConsensusError> { if self.fail_validation() { - Err(consensus::Error::BaseFeeMissing) + Err(consensus::ConsensusError::BaseFeeMissing) } else { Ok(()) } diff --git a/crates/metrics/metrics-derive/tests/metrics.rs b/crates/metrics/metrics-derive/tests/metrics.rs index 8f260761301..cca5ef83ca5 100644 --- a/crates/metrics/metrics-derive/tests/metrics.rs +++ b/crates/metrics/metrics-derive/tests/metrics.rs @@ -303,21 +303,21 @@ impl Recorder for TestRecorder { fn register_counter(&self, key: &Key) -> Counter { let labels_vec: Vec